comment
stringlengths
16
8.84k
method_body
stringlengths
37
239k
target_code
stringlengths
0
242
method_body_after
stringlengths
29
239k
context_before
stringlengths
14
424k
context_after
stringlengths
14
284k
@malinthar let's push this as a separate PR.
private List<PackageInfo> getPackagesFromRepository(PackageRepository repository, List<String> skipList) { Map<String, List<String>> packageMap = repository.getPackages(); List<PackageInfo> packages = new ArrayList<>(); packageMap.forEach((key, value) -> { if (key.equals(Names.BALLERINA_INTERNAL_ORG.getValue())) { return; } value.forEach(nameEntry -> { String[] components = nameEntry.split(":"); if (components.length != 2 || skipList.contains(components[0])) { return; } String nameComponent = components[0]; String version = components[1]; PackageOrg packageOrg = PackageOrg.from(key); PackageName packageName = PackageName.from(nameComponent); PackageVersion pkgVersion = PackageVersion.from(version); try { PackageDescriptor pkdDesc = PackageDescriptor.from(packageOrg, packageName, pkgVersion); ResolutionRequest request = ResolutionRequest.from(pkdDesc, PackageDependencyScope.DEFAULT); Optional<Package> repoPackage = repository.getPackage(request, ResolutionOptions.builder().setOffline(true).build()); repoPackage.ifPresent(pkg -> packages.add(new PackageInfo(pkg))); } catch (Throwable e) { clientLogger.logTrace("Failed to resolve package " + packageOrg + (!packageOrg.value().isEmpty() ? "/" : "" + packageName + ":" + pkgVersion)); } }); }); return packages; }
clientLogger.logTrace("Failed to resolve package "
private List<PackageInfo> getPackagesFromRepository(PackageRepository repository, List<String> skipList) { Map<String, List<String>> packageMap = repository.getPackages(); List<PackageInfo> packages = new ArrayList<>(); packageMap.forEach((key, value) -> { if (key.equals(Names.BALLERINA_INTERNAL_ORG.getValue())) { return; } value.forEach(nameEntry -> { String[] components = nameEntry.split(":"); if (components.length != 2 || skipList.contains(components[0])) { return; } String nameComponent = components[0]; String version = components[1]; PackageOrg packageOrg = PackageOrg.from(key); PackageName packageName = PackageName.from(nameComponent); PackageVersion pkgVersion = PackageVersion.from(version); try { PackageDescriptor pkdDesc = PackageDescriptor.from(packageOrg, packageName, pkgVersion); ResolutionRequest request = ResolutionRequest.from(pkdDesc, PackageDependencyScope.DEFAULT); Optional<Package> repoPackage = repository.getPackage(request, ResolutionOptions.builder().setOffline(true).build()); repoPackage.ifPresent(pkg -> packages.add(new PackageInfo(pkg))); } catch (Throwable e) { clientLogger.logTrace("Failed to resolve package " + packageOrg + (!packageOrg.value().isEmpty() ? "/" : "" + packageName + ":" + pkgVersion)); } }); }); return packages; }
class LSPackageLoader { public static final LanguageServerContext.Key<LSPackageLoader> LS_PACKAGE_LOADER_KEY = new LanguageServerContext.Key<>(); private final List<PackageInfo> distRepoPackages; private List<PackageInfo> remoteRepoPackages; private List<PackageInfo> localRepoPackages; private final LSClientLogger clientLogger; public static LSPackageLoader getInstance(LanguageServerContext context) { LSPackageLoader lsPackageLoader = context.get(LS_PACKAGE_LOADER_KEY); if (lsPackageLoader == null) { lsPackageLoader = new LSPackageLoader(context); } return lsPackageLoader; } private LSPackageLoader(LanguageServerContext context) { this.clientLogger = LSClientLogger.getInstance(context); distRepoPackages = this.getDistributionRepoPackages(); context.put(LS_PACKAGE_LOADER_KEY, this); } /** * Get the local repo packages. * * @return {@link List} of local repo packages */ public List<PackageInfo> getLocalRepoPackages(PackageRepository repository) { if (this.localRepoPackages != null) { return this.localRepoPackages; } this.localRepoPackages = getPackagesFromRepository(repository, Collections.emptyList()); return localRepoPackages; } /** * Get the remote repo packages. * * @return {@link List} of remote repo packages */ public List<PackageInfo> getRemoteRepoPackages(PackageRepository repository) { if (this.remoteRepoPackages != null) { return this.remoteRepoPackages; } this.remoteRepoPackages = getPackagesFromRepository(repository, Collections.emptyList()); return this.remoteRepoPackages; } /** * Get the distribution repo packages. 
* Here the distRepoPackages does not contain the langlib packages and ballerinai packages * * @return {@link List} of distribution repo packages */ public List<PackageInfo> getDistributionRepoPackages() { if (this.distRepoPackages != null) { return this.distRepoPackages; } DefaultEnvironment environment = new DefaultEnvironment(); BallerinaDistribution ballerinaDistribution = BallerinaDistribution.from(environment); PackageRepository packageRepository = ballerinaDistribution.packageRepository(); List<String> skippedLangLibs = Arrays.asList("lang.annotations", "lang.__internal", "lang.query"); return Collections.unmodifiableList(getPackagesFromRepository(packageRepository, skippedLangLibs)); } /** * Get all visible repository and distribution packages. * * @return {@link List} packages */ public List<PackageInfo> getAllVisiblePackages(DocumentServiceContext ctx) { Map<String, PackageInfo> packagesList = new HashMap<>(); this.getDistributionRepoPackages().forEach(packageInfo -> packagesList.put(packageInfo.packageIdentifier(), packageInfo)); List<PackageInfo> repoPackages = this.getPackagesFromBallerinaUserHome(ctx); repoPackages.stream().filter(packageInfo -> !packagesList.containsKey(packageInfo.packageIdentifier())) .forEach(packageInfo -> packagesList.put(packageInfo.packageIdentifier(), packageInfo)); return new ArrayList<>(packagesList.values()); } /** * Returns the list of packages that reside in the BallerinaUserHome (.ballerina) directory. * * @param ctx Document service context. * @return {@link List<PackageInfo>} List of package info. 
*/ public List<PackageInfo> getPackagesFromBallerinaUserHome(DocumentServiceContext ctx) { List<PackageInfo> packagesList = new ArrayList<>(); Optional<Project> project = ctx.workspace().project(ctx.filePath()); if (project.isEmpty()) { return Collections.emptyList(); } BallerinaUserHome ballerinaUserHome = BallerinaUserHome .from(project.get().projectEnvironmentContext().environment()); PackageRepository localRepository = ballerinaUserHome.localPackageRepository(); PackageRepository remoteRepository = ballerinaUserHome.remotePackageRepository(); packagesList.addAll(this.getRemoteRepoPackages(remoteRepository)); packagesList.addAll(this.getLocalRepoPackages(localRepository)); return packagesList; } /** * A light-weight package information holder. */ public static class PackageInfo { private PackageOrg packageOrg; private PackageName packageName; private PackageVersion packageVersion; private Path sourceRoot; private String packageIdentifier; public PackageInfo(Package pkg) { this.packageOrg = pkg.packageOrg(); this.packageName = pkg.packageName(); this.packageVersion = pkg.packageVersion(); this.sourceRoot = pkg.project().sourceRoot(); this.packageIdentifier = packageOrg.toString() + "/" + packageName.toString(); } public PackageName packageName() { return packageName; } public PackageOrg packageOrg() { return packageOrg; } public PackageVersion packageVersion() { return packageVersion; } public Path sourceRoot() { return sourceRoot; } public String packageIdentifier() { return packageIdentifier; } } }
class LSPackageLoader { public static final LanguageServerContext.Key<LSPackageLoader> LS_PACKAGE_LOADER_KEY = new LanguageServerContext.Key<>(); private final List<PackageInfo> distRepoPackages; private List<PackageInfo> remoteRepoPackages; private List<PackageInfo> localRepoPackages; private final LSClientLogger clientLogger; public static LSPackageLoader getInstance(LanguageServerContext context) { LSPackageLoader lsPackageLoader = context.get(LS_PACKAGE_LOADER_KEY); if (lsPackageLoader == null) { lsPackageLoader = new LSPackageLoader(context); } return lsPackageLoader; } private LSPackageLoader(LanguageServerContext context) { this.clientLogger = LSClientLogger.getInstance(context); distRepoPackages = this.getDistributionRepoPackages(); context.put(LS_PACKAGE_LOADER_KEY, this); } /** * Get the local repo packages. * * @return {@link List} of local repo packages */ public List<PackageInfo> getLocalRepoPackages(PackageRepository repository) { if (this.localRepoPackages != null) { return this.localRepoPackages; } this.localRepoPackages = getPackagesFromRepository(repository, Collections.emptyList()); return localRepoPackages; } /** * Get the remote repo packages. * * @return {@link List} of remote repo packages */ public List<PackageInfo> getRemoteRepoPackages(PackageRepository repository) { if (this.remoteRepoPackages != null) { return this.remoteRepoPackages; } this.remoteRepoPackages = getPackagesFromRepository(repository, Collections.emptyList()); return this.remoteRepoPackages; } /** * Get the distribution repo packages. 
* Here the distRepoPackages does not contain the langlib packages and ballerinai packages * * @return {@link List} of distribution repo packages */ public List<PackageInfo> getDistributionRepoPackages() { if (this.distRepoPackages != null) { return this.distRepoPackages; } DefaultEnvironment environment = new DefaultEnvironment(); BallerinaDistribution ballerinaDistribution = BallerinaDistribution.from(environment); PackageRepository packageRepository = ballerinaDistribution.packageRepository(); List<String> skippedLangLibs = Arrays.asList("lang.annotations", "lang.__internal", "lang.query"); return Collections.unmodifiableList(getPackagesFromRepository(packageRepository, skippedLangLibs)); } /** * Get all visible repository and distribution packages. * * @return {@link List} packages */ public List<PackageInfo> getAllVisiblePackages(DocumentServiceContext ctx) { Map<String, PackageInfo> packagesList = new HashMap<>(); this.getDistributionRepoPackages().forEach(packageInfo -> packagesList.put(packageInfo.packageIdentifier(), packageInfo)); List<PackageInfo> repoPackages = this.getPackagesFromBallerinaUserHome(ctx); repoPackages.stream().filter(packageInfo -> !packagesList.containsKey(packageInfo.packageIdentifier())) .forEach(packageInfo -> packagesList.put(packageInfo.packageIdentifier(), packageInfo)); return new ArrayList<>(packagesList.values()); } /** * Returns the list of packages that reside in the BallerinaUserHome (.ballerina) directory. * * @param ctx Document service context. * @return {@link List<PackageInfo>} List of package info. 
*/ public List<PackageInfo> getPackagesFromBallerinaUserHome(DocumentServiceContext ctx) { List<PackageInfo> packagesList = new ArrayList<>(); Optional<Project> project = ctx.workspace().project(ctx.filePath()); if (project.isEmpty()) { return Collections.emptyList(); } BallerinaUserHome ballerinaUserHome = BallerinaUserHome .from(project.get().projectEnvironmentContext().environment()); PackageRepository localRepository = ballerinaUserHome.localPackageRepository(); PackageRepository remoteRepository = ballerinaUserHome.remotePackageRepository(); packagesList.addAll(this.getRemoteRepoPackages(remoteRepository)); packagesList.addAll(this.getLocalRepoPackages(localRepository)); return packagesList; } /** * A light-weight package information holder. */ public static class PackageInfo { private PackageOrg packageOrg; private PackageName packageName; private PackageVersion packageVersion; private Path sourceRoot; private String packageIdentifier; public PackageInfo(Package pkg) { this.packageOrg = pkg.packageOrg(); this.packageName = pkg.packageName(); this.packageVersion = pkg.packageVersion(); this.sourceRoot = pkg.project().sourceRoot(); this.packageIdentifier = packageOrg.toString() + "/" + packageName.toString(); } public PackageName packageName() { return packageName; } public PackageOrg packageOrg() { return packageOrg; } public PackageVersion packageVersion() { return packageVersion; } public Path sourceRoot() { return sourceRoot; } public String packageIdentifier() { return packageIdentifier; } } }
I think we should stop discussing this here; this is relevant for the entirety of the documentation, and we could get much more experienced people involved if we target that instead.
private static Set<String> findEnabledReportersInConfiguration(Configuration configuration, String includedReportersString) { Set<String> includedReporters = reporterListPattern.splitAsStream(includedReportersString) .filter(r -> !r.isEmpty()) .collect(Collectors.toSet()); Set<String> namedOrderedReporters = new TreeSet<>(String::compareTo); for (String key : configuration.keySet()) { if (key.startsWith(ConfigConstants.METRICS_REPORTER_PREFIX)) { Matcher matcher = reporterClassPattern.matcher(key); if (matcher.matches()) { String reporterName = matcher.group(1); if (includedReporters.isEmpty() || includedReporters.contains(reporterName)) { if (namedOrderedReporters.contains(reporterName)) { LOG.warn("Duplicate class configuration detected for reporter {}.", reporterName); } else { namedOrderedReporters.add(reporterName); } } else { LOG.info("Excluding reporter {}, not configured in reporter list ({}).", reporterName, includedReportersString); } } } } return namedOrderedReporters; }
LOG.info("Excluding reporter {}, not configured in reporter list ({}).", reporterName, includedReportersString);
private static Set<String> findEnabledReportersInConfiguration(Configuration configuration, String includedReportersString) { Set<String> includedReporters = reporterListPattern.splitAsStream(includedReportersString) .filter(r -> !r.isEmpty()) .collect(Collectors.toSet()); Set<String> namedOrderedReporters = new TreeSet<>(String::compareTo); for (String key : configuration.keySet()) { if (key.startsWith(ConfigConstants.METRICS_REPORTER_PREFIX)) { Matcher matcher = reporterClassPattern.matcher(key); if (matcher.matches()) { String reporterName = matcher.group(1); if (includedReporters.isEmpty() || includedReporters.contains(reporterName)) { if (namedOrderedReporters.contains(reporterName)) { LOG.warn("Duplicate class configuration detected for reporter {}.", reporterName); } else { namedOrderedReporters.add(reporterName); } } else { LOG.info("Excluding reporter {}, not configured in reporter list ({}).", reporterName, includedReportersString); } } } } return namedOrderedReporters; }
class ReporterSetup { private static final Logger LOG = LoggerFactory.getLogger(ReporterSetup.class); private static final Pattern reporterListPattern = Pattern.compile("\\s*,\\s*"); private static final Pattern reporterClassPattern = Pattern.compile( Pattern.quote(ConfigConstants.METRICS_REPORTER_PREFIX) + "([\\S&&[^.]]*)\\." + '(' + Pattern.quote(ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX) + '|' + Pattern.quote(ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX) + ')'); private final String name; private final MetricConfig configuration; private final MetricReporter reporter; public ReporterSetup(final String name, final MetricConfig configuration, MetricReporter reporter) { this.name = name; this.configuration = configuration; this.reporter = reporter; } public Optional<String> getDelimiter() { return Optional.ofNullable(configuration.getString(ConfigConstants.METRICS_REPORTER_SCOPE_DELIMITER, null)); } public Optional<String> getIntervalSettings() { return Optional.ofNullable(configuration.getString(ConfigConstants.METRICS_REPORTER_INTERVAL_SUFFIX, null)); } public Set<String> getExcludedVariables() { String excludedVariablesList = configuration.getString(ConfigConstants.METRICS_REPORTER_EXCLUDED_VARIABLES, null); if (excludedVariablesList == null) { return Collections.emptySet(); } else { final Set<String> excludedVariables = new HashSet<>(); for (String exclusion : excludedVariablesList.split(";")) { excludedVariables.add(ScopeFormat.asVariable(exclusion)); } return Collections.unmodifiableSet(excludedVariables); } } public String getName() { return name; } @VisibleForTesting MetricConfig getConfiguration() { return configuration; } public MetricReporter getReporter() { return reporter; } @VisibleForTesting public static ReporterSetup forReporter(String reporterName, MetricReporter reporter) { return createReporterSetup(reporterName, new MetricConfig(), reporter); } @VisibleForTesting public static ReporterSetup forReporter(String reporterName, 
MetricConfig metricConfig, MetricReporter reporter) { return createReporterSetup(reporterName, metricConfig, reporter); } private static ReporterSetup createReporterSetup(String reporterName, MetricConfig metricConfig, MetricReporter reporter) { LOG.info("Configuring {} with {}.", reporterName, metricConfig); reporter.open(metricConfig); return new ReporterSetup(reporterName, metricConfig, reporter); } public static List<ReporterSetup> fromConfiguration(final Configuration configuration, final PluginManager pluginManager) { String includedReportersString = configuration.getString(MetricOptions.REPORTERS_LIST, ""); Set<String> namedReporters = findEnabledReportersInConfiguration(configuration, includedReportersString); if (namedReporters.isEmpty()) { return Collections.emptyList(); } final List<Tuple2<String, Configuration>> reporterConfigurations = loadReporterConfigurations(configuration, namedReporters); final Map<String, MetricReporterFactory> reporterFactories = loadAvailableReporterFactories(pluginManager); LOG.debug("Loaded Reporter Factories: {}", reporterFactories); List<ReporterSetup> reporterSetups = setupReporters(reporterFactories, reporterConfigurations); LOG.debug("All initialized Reporters:"); reporterSetups.forEach(i -> LOG.debug("{}", i.getName())); return reporterSetups; } private static List<ReporterSetup> setupReporters(Map<String, MetricReporterFactory> reporterFactories, List<Tuple2<String, Configuration>> reporterConfigurations) { List<ReporterSetup> reporterSetups = new ArrayList<>(reporterConfigurations.size()); for (Tuple2<String, Configuration> reporterConfiguration: reporterConfigurations) { String reporterName = reporterConfiguration.f0; Configuration reporterConfig = reporterConfiguration.f1; try { Optional<MetricReporter> metricReporterOptional = loadReporter(reporterName, reporterConfig, reporterFactories); metricReporterOptional.ifPresent(reporter -> { MetricConfig metricConfig = new MetricConfig(); 
reporterConfig.addAllToProperties(metricConfig); reporterSetups.add(createReporterSetup(reporterName, metricConfig, reporter)); }); } catch (Throwable t) { LOG.error("Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.", reporterName, t); } } return reporterSetups; } private static List<Tuple2<String, Configuration>> loadReporterConfigurations(Configuration configuration, Set<String> namedReporters) { final List<Tuple2<String, Configuration>> reporterConfigurations = new ArrayList<>(namedReporters.size()); for (String namedReporter: namedReporters) { DelegatingConfiguration delegatingConfiguration = new DelegatingConfiguration( configuration, ConfigConstants.METRICS_REPORTER_PREFIX + namedReporter + '.'); reporterConfigurations.add(Tuple2.of(namedReporter, delegatingConfiguration)); } return reporterConfigurations; } private static Map<String, MetricReporterFactory> loadAvailableReporterFactories(PluginManager pluginManager) { final Map<String, MetricReporterFactory> reporterFactories = new HashMap<>(2); final Iterator<MetricReporterFactory> factoryIterator = getAllReporterFactories(pluginManager); LOG.info("Prepare reporter factories (from both SPIs and Plugins):"); while (factoryIterator.hasNext()) { try { MetricReporterFactory factory = factoryIterator.next(); String factoryClassName = factory.getClass().getName(); MetricReporterFactory existingFactory = reporterFactories.get(factoryClassName); if (existingFactory == null){ reporterFactories.put(factoryClassName, factory); LOG.info("Found reporter factory {} at {} ", factoryClassName, new File(factory.getClass().getProtectionDomain().getCodeSource().getLocation().toURI()).getCanonicalPath()); } else { LOG.warn("Multiple implementations of the same reporter were found in 'lib' and/or 'plugins' directories for {}. 
It is recommended to remove redundant reporter JARs to resolve used versions' ambiguity.", factoryClassName); } } catch (Exception | ServiceConfigurationError e) { LOG.warn("Error while loading reporter factory.", e); } } return Collections.unmodifiableMap(reporterFactories); } private static Iterator<MetricReporterFactory> getAllReporterFactories(PluginManager pluginManager){ final Iterator<MetricReporterFactory> factoryIteratorSPI = ServiceLoader.load(MetricReporterFactory.class).iterator(); final Iterator<MetricReporterFactory> factoryIteratorPlugins = pluginManager.load(MetricReporterFactory.class); return Iterators.concat(factoryIteratorPlugins, factoryIteratorSPI); } private static Optional<MetricReporter> loadReporter( final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) throws ClassNotFoundException, IllegalAccessException, InstantiationException { final String reporterClassName = reporterConfig.getString(ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, null); final String factoryClassName = reporterConfig.getString(ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, null); if (factoryClassName != null) { return loadViaFactory(factoryClassName, reporterName, reporterConfig, reporterFactories); } if (reporterClassName != null) { return loadViaReflection(reporterClassName, reporterName, reporterConfig, reporterFactories); } LOG.warn("No reporter class nor factory set for reporter {}. Metrics might not be exposed/reported.", reporterName); return Optional.empty(); } private static Optional<MetricReporter> loadViaFactory( final String factoryClassName, final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) { MetricReporterFactory factory = reporterFactories.get(factoryClassName); if (factory == null) { LOG.warn("The reporter factory ({}) could not be found for reporter {}. 
Available factories: {}.", factoryClassName, reporterName, reporterFactories.keySet()); return Optional.empty(); } else { final MetricConfig metricConfig = new MetricConfig(); reporterConfig.addAllToProperties(metricConfig); return Optional.of(factory.createMetricReporter(metricConfig)); } } private static Optional<MetricReporter> loadViaReflection( final String reporterClassName, final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) throws ClassNotFoundException, IllegalAccessException, InstantiationException { final Class<?> reporterClass = Class.forName(reporterClassName); final InstantiateViaFactory alternativeFactoryAnnotation = reporterClass.getAnnotation(InstantiateViaFactory.class); if (alternativeFactoryAnnotation != null) { final String alternativeFactoryClassName = alternativeFactoryAnnotation.factoryClassName(); LOG.info("The reporter configuration of {} is out-dated (but still supported)." + " Please configure a factory class instead: '{}{}.{}: {}' to ensure that the configuration" + " continues to work with future versions.", reporterName, ConfigConstants.METRICS_REPORTER_PREFIX, reporterName, ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, alternativeFactoryClassName); return loadViaFactory(alternativeFactoryClassName, reporterName, reporterConfig, reporterFactories); } return Optional.of((MetricReporter) reporterClass.newInstance()); } }
class ReporterSetup { private static final Logger LOG = LoggerFactory.getLogger(ReporterSetup.class); private static final Pattern reporterListPattern = Pattern.compile("\\s*,\\s*"); private static final Pattern reporterClassPattern = Pattern.compile( Pattern.quote(ConfigConstants.METRICS_REPORTER_PREFIX) + "([\\S&&[^.]]*)\\." + '(' + Pattern.quote(ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX) + '|' + Pattern.quote(ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX) + ')'); private final String name; private final MetricConfig configuration; private final MetricReporter reporter; public ReporterSetup(final String name, final MetricConfig configuration, MetricReporter reporter) { this.name = name; this.configuration = configuration; this.reporter = reporter; } public Optional<String> getDelimiter() { return Optional.ofNullable(configuration.getString(ConfigConstants.METRICS_REPORTER_SCOPE_DELIMITER, null)); } public Optional<String> getIntervalSettings() { return Optional.ofNullable(configuration.getString(ConfigConstants.METRICS_REPORTER_INTERVAL_SUFFIX, null)); } public Set<String> getExcludedVariables() { String excludedVariablesList = configuration.getString(ConfigConstants.METRICS_REPORTER_EXCLUDED_VARIABLES, null); if (excludedVariablesList == null) { return Collections.emptySet(); } else { final Set<String> excludedVariables = new HashSet<>(); for (String exclusion : excludedVariablesList.split(";")) { excludedVariables.add(ScopeFormat.asVariable(exclusion)); } return Collections.unmodifiableSet(excludedVariables); } } public String getName() { return name; } @VisibleForTesting MetricConfig getConfiguration() { return configuration; } public MetricReporter getReporter() { return reporter; } @VisibleForTesting public static ReporterSetup forReporter(String reporterName, MetricReporter reporter) { return createReporterSetup(reporterName, new MetricConfig(), reporter); } @VisibleForTesting public static ReporterSetup forReporter(String reporterName, 
MetricConfig metricConfig, MetricReporter reporter) { return createReporterSetup(reporterName, metricConfig, reporter); } private static ReporterSetup createReporterSetup(String reporterName, MetricConfig metricConfig, MetricReporter reporter) { reporter.open(metricConfig); return new ReporterSetup(reporterName, metricConfig, reporter); } public static List<ReporterSetup> fromConfiguration(final Configuration configuration, @Nullable final PluginManager pluginManager) { String includedReportersString = configuration.getString(MetricOptions.REPORTERS_LIST, ""); Set<String> namedReporters = findEnabledReportersInConfiguration(configuration, includedReportersString); if (namedReporters.isEmpty()) { return Collections.emptyList(); } final List<Tuple2<String, Configuration>> reporterConfigurations = loadReporterConfigurations(configuration, namedReporters); final Map<String, MetricReporterFactory> reporterFactories = loadAvailableReporterFactories(pluginManager); return setupReporters(reporterFactories, reporterConfigurations); } private static List<Tuple2<String, Configuration>> loadReporterConfigurations(Configuration configuration, Set<String> namedReporters) { final List<Tuple2<String, Configuration>> reporterConfigurations = new ArrayList<>(namedReporters.size()); for (String namedReporter: namedReporters) { DelegatingConfiguration delegatingConfiguration = new DelegatingConfiguration( configuration, ConfigConstants.METRICS_REPORTER_PREFIX + namedReporter + '.'); reporterConfigurations.add(Tuple2.of(namedReporter, delegatingConfiguration)); } return reporterConfigurations; } private static Map<String, MetricReporterFactory> loadAvailableReporterFactories(@Nullable PluginManager pluginManager) { final Map<String, MetricReporterFactory> reporterFactories = new HashMap<>(2); final Iterator<MetricReporterFactory> factoryIterator = getAllReporterFactories(pluginManager); while (factoryIterator.hasNext()) { try { MetricReporterFactory factory = factoryIterator.next(); 
String factoryClassName = factory.getClass().getName(); MetricReporterFactory existingFactory = reporterFactories.get(factoryClassName); if (existingFactory == null) { reporterFactories.put(factoryClassName, factory); LOG.debug("Found reporter factory {} at {} ", factoryClassName, new File(factory.getClass().getProtectionDomain().getCodeSource().getLocation().toURI()).getCanonicalPath()); } else { LOG.warn("Multiple implementations of the same reporter were found in 'lib' and/or 'plugins' directories for {}. It is recommended to remove redundant reporter JARs to resolve used versions' ambiguity.", factoryClassName); } } catch (Exception | ServiceConfigurationError e) { LOG.warn("Error while loading reporter factory.", e); } } return Collections.unmodifiableMap(reporterFactories); } private static Iterator<MetricReporterFactory> getAllReporterFactories(@Nullable PluginManager pluginManager) { final Iterator<MetricReporterFactory> factoryIteratorSPI = ServiceLoader.load(MetricReporterFactory.class).iterator(); final Iterator<MetricReporterFactory> factoryIteratorPlugins = pluginManager != null ? 
pluginManager.load(MetricReporterFactory.class) : Collections.emptyIterator(); return Iterators.concat(factoryIteratorPlugins, factoryIteratorSPI); } private static List<ReporterSetup> setupReporters(Map<String, MetricReporterFactory> reporterFactories, List<Tuple2<String, Configuration>> reporterConfigurations) { List<ReporterSetup> reporterSetups = new ArrayList<>(reporterConfigurations.size()); for (Tuple2<String, Configuration> reporterConfiguration: reporterConfigurations) { String reporterName = reporterConfiguration.f0; Configuration reporterConfig = reporterConfiguration.f1; try { Optional<MetricReporter> metricReporterOptional = loadReporter(reporterName, reporterConfig, reporterFactories); metricReporterOptional.ifPresent(reporter -> { MetricConfig metricConfig = new MetricConfig(); reporterConfig.addAllToProperties(metricConfig); reporterSetups.add(createReporterSetup(reporterName, metricConfig, reporter)); }); } catch (Throwable t) { LOG.error("Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.", reporterName, t); } } return reporterSetups; } private static Optional<MetricReporter> loadReporter( final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) throws ClassNotFoundException, IllegalAccessException, InstantiationException { final String reporterClassName = reporterConfig.getString(ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, null); final String factoryClassName = reporterConfig.getString(ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, null); if (factoryClassName != null) { return loadViaFactory(factoryClassName, reporterName, reporterConfig, reporterFactories); } if (reporterClassName != null) { return loadViaReflection(reporterClassName, reporterName, reporterConfig, reporterFactories); } LOG.warn("No reporter class nor factory set for reporter {}. 
Metrics might not be exposed/reported.", reporterName); return Optional.empty(); } private static Optional<MetricReporter> loadViaFactory( final String factoryClassName, final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) { MetricReporterFactory factory = reporterFactories.get(factoryClassName); if (factory == null) { LOG.warn("The reporter factory ({}) could not be found for reporter {}. Available factories: {}.", factoryClassName, reporterName, reporterFactories.keySet()); return Optional.empty(); } else { final MetricConfig metricConfig = new MetricConfig(); reporterConfig.addAllToProperties(metricConfig); return Optional.of(factory.createMetricReporter(metricConfig)); } } private static Optional<MetricReporter> loadViaReflection( final String reporterClassName, final String reporterName, final Configuration reporterConfig, final Map<String, MetricReporterFactory> reporterFactories) throws ClassNotFoundException, IllegalAccessException, InstantiationException { final Class<?> reporterClass = Class.forName(reporterClassName); final InstantiateViaFactory alternativeFactoryAnnotation = reporterClass.getAnnotation(InstantiateViaFactory.class); if (alternativeFactoryAnnotation != null) { final String alternativeFactoryClassName = alternativeFactoryAnnotation.factoryClassName(); LOG.info("The reporter configuration of {} is out-dated (but still supported)." + " Please configure a factory class instead: '{}{}.{}: {}' to ensure that the configuration" + " continues to work with future versions.", reporterName, ConfigConstants.METRICS_REPORTER_PREFIX, reporterName, ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, alternativeFactoryClassName); return loadViaFactory(alternativeFactoryClassName, reporterName, reporterConfig, reporterFactories); } return Optional.of((MetricReporter) reporterClass.newInstance()); } }
The output seems to be wrong. For the combination where Dependencies.toml exists and --sticky flag is given, we don't update automatically. Instead, we give a hint asking the user to set sticky to false.
/**
 * Builds a pre-compiled package whose Dependencies.toml was generated by an older
 * distribution (2201.5.0) while the {@code --sticky} flag is set. With sticky mode,
 * dependency versions must NOT be auto-updated; instead the build emits a hint asking
 * the user to set sticky to false.
 *
 * Fix: the expected-output file was {@code build-old-dist-precomp-proj-with-dep.txt}
 * (the auto-update output), but the sticky path produces the hint output captured in
 * {@code build-old-dist-precomp-proj-with-sticky.txt}.
 *
 * @throws IOException if reading the captured build output or the TOML files fails
 */
public void testBuildProjectPrecompiledWithOlderDistWithStickyFlag() throws IOException {
    Path projectPath = testResources.resolve("dep-dist-version-projects").resolve("preCompiledPackage");
    // Pin the recorded distribution version to an older release before building.
    replaceDependenciesTomlContent(projectPath, "**INSERT_DISTRIBUTION_VERSION_HERE**", "2201.5.0");
    System.setProperty("user.dir", projectPath.toString());
    BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false);
    new CommandLine(buildCommand).parseArgs("--sticky");
    buildCommand.execute();
    String buildLog = readOutput(true);
    // Sticky mode: expect the "set sticky to false" hint output, not the auto-update output.
    Assert.assertEquals(
            buildLog.replaceAll("\r", ""),
            getOutput("build-old-dist-precomp-proj-with-sticky.txt")
                    .replaceAll("INSERT_NEW_DIST_VERSION_HERE", getNewVersionForOldDistWarning())
                    .replaceAll("INSERT_OLD_DIST_VERSION_HERE", getOldVersionForOldDistWarning("2201.5.0")));
    Path actualDependenciesToml = projectPath.resolve("Dependencies.toml");
    Path expectedDependenciesToml = testResources.resolve("dep-dist-version-projects").resolve("expected-dep-tomls")
            .resolve("precomp-pkg-with-sticky.toml");
    // Dependencies.toml must still exist and must match the sticky (non-updated) expectation.
    Assert.assertTrue(actualDependenciesToml.toFile().exists());
    assertTomlFilesEquals(actualDependenciesToml, expectedDependenciesToml);
    // Restore the placeholder so subsequent tests see a clean fixture, and clear build artifacts.
    replaceDependenciesTomlContent(projectPath, RepoUtils.getBallerinaShortVersion(),
            "**INSERT_DISTRIBUTION_VERSION_HERE**");
    deleteDirectory(projectPath.resolve("target"));
}
.replaceAll("INSERT_OLD_DIST_VERSION_HERE", getOldVersionForOldDistWarning("2201.5.0")));
/**
 * Builds a pre-compiled package whose Dependencies.toml records an older distribution
 * (2201.5.0) with the {@code --sticky} flag set. Sticky mode must keep the locked
 * dependency versions and print a hint asking the user to set sticky to false
 * (captured in build-old-dist-precomp-proj-with-sticky.txt).
 *
 * @throws IOException if reading the captured build output or the TOML files fails
 */
public void testBuildProjectPrecompiledWithOlderDistWithStickyFlag() throws IOException {
    Path projectPath = testResources.resolve("dep-dist-version-projects").resolve("preCompiledPackage");
    // Pin the recorded distribution version to an older release before building.
    replaceDependenciesTomlContent(projectPath, "**INSERT_DISTRIBUTION_VERSION_HERE**", "2201.5.0");
    System.setProperty("user.dir", projectPath.toString());
    BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false);
    new CommandLine(buildCommand).parseArgs("--sticky");
    buildCommand.execute();
    String buildLog = readOutput(true);
    // Compare against the sticky-mode expected output, with version placeholders substituted.
    Assert.assertEquals(
            buildLog.replaceAll("\r", ""),
            getOutput("build-old-dist-precomp-proj-with-sticky.txt")
                    .replaceAll("INSERT_NEW_DIST_VERSION_HERE", getNewVersionForOldDistWarning())
                    .replaceAll("INSERT_OLD_DIST_VERSION_HERE", getOldVersionForOldDistWarning("2201.5.0")));
    Path actualDependenciesToml = projectPath.resolve("Dependencies.toml");
    Path expectedDependenciesToml = testResources.resolve("dep-dist-version-projects").resolve("expected-dep-tomls")
            .resolve("precomp-pkg-with-sticky.toml");
    // Dependencies.toml must still exist and match the non-updated (sticky) expectation.
    Assert.assertTrue(actualDependenciesToml.toFile().exists());
    assertTomlFilesEquals(actualDependenciesToml, expectedDependenciesToml);
    // Restore the placeholder for subsequent tests and clear build artifacts.
    replaceDependenciesTomlContent(projectPath, RepoUtils.getBallerinaShortVersion(),
            "**INSERT_DISTRIBUTION_VERSION_HERE**");
    deleteDirectory(projectPath.resolve("target"));
}
class file */
// Verifies that a project with conflicting platform JAR entries still builds:
// the executable and the cached package JAR are both produced, and the conflict
// output matches the recorded expectation.
@Test(description = "Build a ballerina project with conflicted jars")
public void testBuildBalProjectWithJarConflicts() throws IOException {
    Path projectPath = this.testResources.resolve("projectWithConflictedJars");
    System.setProperty("user.dir", projectPath.toString());
    BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false);
    new CommandLine(buildCommand).parseArgs();
    buildCommand.execute();
    String buildLog = readOutput(true);
    // Build log should include the jar-conflict warnings captured in the expected output.
    Assert.assertEquals(buildLog.replaceAll("\r", ""), getOutput("build-bal-project-with-jar-conflicts.txt"));
    // Executable produced despite the conflicts.
    Assert.assertTrue(
            projectPath.resolve("target").resolve("bin").resolve("conflictProject.jar").toFile().exists());
    // Cached package JAR produced under target/cache/<org>/<pkg>/<version>/java11.
    Assert.assertTrue(projectPath.resolve("target").resolve("cache").resolve("pramodya")
            .resolve("conflictProject").resolve("0.1.7").resolve("java11")
            .resolve("pramodya-conflictProject-0.1.7.jar").toFile().exists());
}
class file */
// Verifies that a project with conflicting platform JAR entries still builds:
// the executable and the cached package JAR are both produced, and the conflict
// output matches the recorded expectation.
@Test(description = "Build a ballerina project with conflicted jars")
public void testBuildBalProjectWithJarConflicts() throws IOException {
    Path projectPath = this.testResources.resolve("projectWithConflictedJars");
    System.setProperty("user.dir", projectPath.toString());
    BuildCommand buildCommand = new BuildCommand(projectPath, printStream, printStream, false);
    new CommandLine(buildCommand).parseArgs();
    buildCommand.execute();
    String buildLog = readOutput(true);
    // Build log should include the jar-conflict warnings captured in the expected output.
    Assert.assertEquals(buildLog.replaceAll("\r", ""), getOutput("build-bal-project-with-jar-conflicts.txt"));
    // Executable produced despite the conflicts.
    Assert.assertTrue(
            projectPath.resolve("target").resolve("bin").resolve("conflictProject.jar").toFile().exists());
    // Cached package JAR produced under target/cache/<org>/<pkg>/<version>/java11.
    Assert.assertTrue(projectPath.resolve("target").resolve("cache").resolve("pramodya")
            .resolve("conflictProject").resolve("0.1.7").resolve("java11")
            .resolve("pramodya-conflictProject-0.1.7.jar").toFile().exists());
}
Immutable expressions should not have mutable fields. If we need some mutable state, we should depend on the expression type to compute the state, and change it when replacing children. E.g. UnboundExpression.isAnalyzed() = false; for other expressions, isAnalyzed() = Suppliers.memoize(() -> children().allMatch(Expression::isAnalyzed)).
/**
 * Registers one analysis rule per plan operator that may carry subquery
 * expressions (project / filter / aggregate / sort). Each rule collects the
 * SubqueryExprs from the operator's expressions and, if any are present,
 * rebuilds the operator on top of a child plan produced by
 * {@code analyzedSubquery} (which attaches the apply/join nodes).
 *
 * @return the list of subquery-analysis rules
 */
public List<Rule> buildRules() {
    return ImmutableList.of(
            RuleType.ANALYZE_PROJECT_SUBQUERY.build(
                    logicalProject().thenApply(ctx -> {
                        LogicalProject<GroupPlan> project = ctx.root;
                        List<SubqueryExpr> subqueryExprs = new ArrayList<>();
                        project.getProjects()
                                .forEach(expr -> subqueryExprs.addAll(extractSubquery(expr)));
                        if (subqueryExprs.size() == 0) {
                            return project; // no subqueries: leave the operator untouched
                        }
                        return new LogicalProject(project.getProjects(),
                                analyzedSubquery(subqueryExprs, project.child(), ctx.plannerContext));
                    })
            ),
            RuleType.ANALYZE_FILTER_SUBQUERY.build(
                    logicalFilter().thenApply(ctx -> {
                        LogicalFilter<GroupPlan> filter = ctx.root;
                        List<SubqueryExpr> subqueryExprs = extractSubquery(filter.getPredicates());
                        if (subqueryExprs.size() == 0) {
                            return filter;
                        }
                        return new LogicalFilter<>(filter.getPredicates(),
                                analyzedSubquery(subqueryExprs, filter.child(), ctx.plannerContext));
                    })
            ),
            RuleType.ANALYZE_AGGREGATE_SUBQUERY.build(
                    logicalAggregate().thenApply(ctx -> {
                        LogicalAggregate<GroupPlan> agg = ctx.root;
                        List<SubqueryExpr> subqueryExprs = new ArrayList<>();
                        // Both grouping keys and output expressions may embed subqueries.
                        agg.getGroupByExpressions().forEach(expr -> subqueryExprs.addAll(extractSubquery(expr)));
                        agg.getOutputExpressions().forEach(expr -> subqueryExprs.addAll(extractSubquery(expr)));
                        if (subqueryExprs.size() == 0) {
                            return agg;
                        }
                        return new LogicalAggregate<>(agg.getGroupByExpressions(), agg.getOutputExpressions(),
                                agg.isDisassembled(), agg.getAggPhase(),
                                analyzedSubquery(subqueryExprs, agg.child(), ctx.plannerContext));
                    })
            ),
            RuleType.ANALYZE_SORT_SUBQUERY.build(
                    logicalSort().thenApply(ctx -> {
                        LogicalSort<GroupPlan> sort = ctx.root;
                        List<SubqueryExpr> subqueryExprs = new ArrayList<>();
                        // Subqueries may appear inside the sort keys' expressions.
                        sort.getOrderKeys().forEach(orderKey -> subqueryExprs.addAll(extractSubquery(
                                orderKey.getExpr())));
                        if (subqueryExprs.size() == 0) {
                            return sort;
                        }
                        return new LogicalSort<>(sort.getOrderKeys(),
                                analyzedSubquery(subqueryExprs, sort.child(), ctx.plannerContext));
                    })
            )
    );
}
RuleType.ANALYZE_SORT_SUBQUERY.build(
/**
 * Registers the filter subquery-analysis rule. Subqueries found in the filter
 * predicate are turned into LogicalApply nodes below the filter by
 * {@code analyzedSubquery}, and the predicate itself is rewritten so it no
 * longer references the subquery expressions directly (see ReplaceSubquery).
 *
 * @return the list of subquery-analysis rules
 */
public List<Rule> buildRules() {
    return ImmutableList.of(
            RuleType.ANALYZE_FILTER_SUBQUERY.build(
                    logicalFilter().thenApply(ctx -> {
                        LogicalFilter filter = ctx.root;
                        // Collect every SubqueryExpr embedded in the predicate tree.
                        List<SubqueryExpr> subqueryExprs = filter.getPredicates()
                                .collect(SubqueryExpr.class::isInstance);
                        if (subqueryExprs.isEmpty()) {
                            return filter; // nothing to rewrite
                        }
                        return new LogicalFilter<>(new ReplaceSubquery().replace(filter.getPredicates()),
                                analyzedSubquery(
                                        subqueryExprs,
                                        (LogicalPlan) filter.child(),
                                        ctx.cascadesContext
                                ));
                    })
            )
    );
}
// Analyzes subquery expressions found in a plan by rewriting the child plan with
// apply / correlated-join nodes, one subquery kind at a time.
// NOTE(review): '@Override' on a private method does not compile in standard Java;
// this looks like an artifact of how the snippet was extracted — confirm against
// the full source file.
class AnalyzeSubquery implements AnalysisRuleFactory {
    @Override
    // Returns every SubqueryExpr reachable from the given expression, including
    // the expression itself when it already is a subquery.
    private List<SubqueryExpr> extractSubquery(Expression expression) {
        if (expression instanceof SubqueryExpr) {
            return ImmutableList.of((SubqueryExpr) expression);
        }
        Builder<SubqueryExpr> builder = ImmutableList.<SubqueryExpr>builder();
        getAllSubquery(expression, builder);
        return builder.build();
    }

    // Depth-first walk over the children, adding each subquery found; does not
    // descend into a subquery's own children.
    private void getAllSubquery(Expression expression, Builder builder) {
        for (Expression expr : expression.children()) {
            if (expr instanceof SubqueryExpr) {
                builder.add(expr);
            } else {
                getAllSubquery(expr, builder);
            }
        }
    }

    // Rewrites the child plan for the FIRST not-yet-analyzed subquery and returns
    // immediately; remaining subqueries are handled by later rule applications.
    // Returns the child plan unchanged when nothing needs analysis.
    private LogicalPlan analyzedSubquery(List<SubqueryExpr> subqueryExprs, LogicalPlan childPlan,
            PlannerContext ctx) {
        for (SubqueryExpr subqueryExpr : subqueryExprs) {
            if (!subqueryExpr.isAnalyzed()) {
                if (subqueryExpr instanceof InSubquery) {
                    return addInSubqueryApplyNodes((InSubquery) subqueryExpr, childPlan, ctx);
                } else if (subqueryExpr instanceof ScalarSubquery) {
                    return addScalarSubqueryCorrelatedJoins((ScalarSubquery) subqueryExpr, childPlan, ctx);
                } else if (subqueryExpr instanceof Exists) {
                    return addExistsApplyNodes((Exists) subqueryExpr, childPlan, ctx);
                }
            }
        }
        return childPlan;
    }

    // Scalar subquery: enforce single-row output, then attach it to the outer plan
    // with an always-true inner correlated join.
    private LogicalPlan addScalarSubqueryCorrelatedJoins(ScalarSubquery scalarSubquery, LogicalPlan childPlan,
            PlannerContext ctx) {
        LogicalPlan enforce = new LogicalEnforceSingleRow<>(scalarSubquery.getQueryPlan());
        // Mutable analyzed flag stored on the expression itself (see review note about
        // immutability of expressions).
        scalarSubquery.setAnalyzed(true);
        return new LogicalCorrelatedJoin(childPlan, enforce,
                ctx.getSubquery(scalarSubquery.getQueryPlan()).getCorrelateSlots(),
                JoinType.INNER_JOIN, Optional.of(BooleanLiteral.TRUE));
    }

    // IN subquery: mark analyzed and wrap with a LogicalApply node.
    private LogicalPlan addInSubqueryApplyNodes(InSubquery inSubquery, LogicalPlan childPlan, PlannerContext ctx) {
        inSubquery.setAnalyzed(true);
        return appendApplyNode(inSubquery, childPlan,
                ctx.getSubquery(inSubquery.getQueryPlan()).getCorrelateSlots());
    }

    // EXISTS subquery: mark analyzed and wrap with a LogicalApply node.
    private LogicalPlan addExistsApplyNodes(Exists exists, LogicalPlan childPlan, PlannerContext ctx) {
        exists.setAnalyzed(true);
        return appendApplyNode(exists, childPlan, ctx.getSubquery(exists.getQueryPlan()).getCorrelateSlots());
    }

    // Wraps the outer child plan and the subquery's plan in a LogicalApply node
    // carrying the correlated slots.
    private LogicalPlan appendApplyNode(SubqueryExpr subqueryExpr, LogicalPlan childPlan,
            List<Slot> correlateSlots) {
        return new LogicalApply(childPlan, subqueryExpr.getQueryPlan(), correlateSlots);
    }
}
// Analyzes subquery expressions by stacking a LogicalApply per subquery under the
// operator, tracking the analyzed state in the CascadesContext instead of on the
// (immutable) expression itself.
// NOTE(review): '@Override' on a private method does not compile in standard Java;
// this looks like an artifact of how the snippet was extracted — confirm against
// the full source file.
class AnalyzeSubquery implements AnalysisRuleFactory {
    @Override
    // Chains one apply node per not-yet-analyzed subquery on top of the child
    // plan; the analyzed state lives in the context, not on the expression.
    private LogicalPlan analyzedSubquery(List<SubqueryExpr> subqueryExprs, LogicalPlan childPlan,
            CascadesContext ctx) {
        LogicalPlan tmpPlan = childPlan;
        for (SubqueryExpr subqueryExpr : subqueryExprs) {
            if (!ctx.subqueryIsAnalyzed(subqueryExpr)) {
                tmpPlan = addApply(subqueryExpr, tmpPlan, ctx);
            }
        }
        return tmpPlan;
    }

    // Marks the subquery analyzed in the context, wraps child + subquery plan in a
    // LogicalApply, and projects the outer output (plus the scalar subquery's
    // single output column when applicable).
    private LogicalPlan addApply(SubqueryExpr subquery, LogicalPlan childPlan, CascadesContext ctx) {
        ctx.setSubqueryExprIsAnalyzed(subquery, true);
        LogicalApply newApply = new LogicalApply(
                subquery.getCorrelateSlots(),
                subquery, Optional.empty(),
                childPlan, subquery.getQueryPlan());
        List<Slot> projects = new ArrayList<>(childPlan.getOutput());
        if (subquery instanceof ScalarSubquery) {
            // Expose the scalar subquery's value so the rewritten predicate can use it.
            projects.add(subquery.getQueryPlan().getOutput().get(0));
        }
        return new LogicalProject(projects, newApply);
    }

    /**
     * The Subquery in the LogicalFilter will change to LogicalApply, so we must replace the origin Subquery.
     * LogicalFilter(predicate(contain subquery)) -> LogicalFilter(predicate(not contain subquery)
     * Replace the subquery in logical with the relevant expression.
     *
     * The replacement rules are as follows:
     * before:
     * 1.filter(t1.a = scalarSubquery(output b));
     * 2.filter(inSubquery); inSubquery = (t1.a in select ***);
     * 3.filter(exists); exists = (select ***);
     *
     * after:
     * 1.filter(t1.a = b);
     * 2.filter(True);
     * 3.filter(True);
     */
    private static class ReplaceSubquery extends DefaultExpressionRewriter {
        public Expression replace(Expression expression) {
            return (Expression) expression.accept(this, null);
        }

        @Override
        public Object visitExistsSubquery(Exists exists, Object context) {
            return BooleanLiteral.TRUE;
        }

        @Override
        public Object visitInSubquery(InSubquery in, Object context) {
            return BooleanLiteral.TRUE;
        }

        @Override
        public Object visitScalarSubquery(ScalarSubquery scalar, Object context) {
            // Replace the subquery with its (single) output slot, now produced by
            // the projection above the apply node.
            return scalar.getQueryPlan().getOutput().get(0);
        }
    }
}
We should generally avoid the use of [magic numbers](https://stackoverflow.com/questions/47882/what-is-a-magic-number-and-why-is-it-bad) and replace them with named constants that convey what they stand for.
/**
 * Extracts the operation ID from an 'Operation-Location' URL: the 36-character
 * segment immediately following the last '/'.
 *
 * Fix: replaces the magic number {@code 37} with a named constant expressing that
 * the ID is {@code lastIndex + 1 + 36} characters (a UUID string is 36 chars).
 *
 * @param operationLocation the URL from the 'Operation-Location' response header.
 * @return the operation ID used to track the long-running operation.
 * @throws RuntimeException (logged) if the URL is null/empty or has no '/'.
 */
public static String parseOperationId(String operationLocation) {
    if (!CoreUtils.isNullOrEmpty(operationLocation)) {
        // Length of a UUID string such as "123e4567-e89b-12d3-a456-426614174000".
        final int operationIdLength = 36;
        int lastIndex = operationLocation.lastIndexOf('/');
        if (lastIndex != -1) {
            // NOTE(review): assumes at least 36 characters follow the last '/';
            // otherwise substring throws StringIndexOutOfBoundsException —
            // confirm against the service's Operation-Location contract.
            return operationLocation.substring(lastIndex + 1, lastIndex + 1 + operationIdLength);
        }
    }
    throw LOGGER.logExceptionAsError(
        new RuntimeException("Failed to parse operation header for operation Id from: " + operationLocation));
}
return operationLocation.substring(lastIndex + 1, lastIndex + 37);
/**
 * Extracts the operation ID from an 'Operation-Location' URL: the segment
 * immediately following the last '/'.
 *
 * @param operationLocation the URL from the 'Operation-Location' response header.
 * @return the operation ID used to track the long-running operation.
 * @throws RuntimeException (logged) if the URL is null/empty or has no '/'.
 */
public static String parseOperationId(String operationLocation) {
    if (!CoreUtils.isNullOrEmpty(operationLocation)) {
        final int indexBeforeOperationId = operationLocation.lastIndexOf('/');
        if (indexBeforeOperationId != -1) {
            // OPERATION_ID_LENGTH is declared elsewhere in this class; presumably it
            // accounts for the '/' plus a 36-char UUID (i.e. 37) — TODO confirm.
            // NOTE(review): assumes enough characters follow the last '/'; otherwise
            // substring throws StringIndexOutOfBoundsException.
            return operationLocation.substring(indexBeforeOperationId + 1,
                    indexBeforeOperationId + OPERATION_ID_LENGTH);
        }
    }
    throw LOGGER.logExceptionAsError(
        new RuntimeException("Failed to parse operation header for operation Id from: " + operationLocation));
}
class Utility { public static final Duration DEFAULT_POLL_INTERVAL = Duration.ofSeconds(30); private static final ClientLogger LOGGER = new ClientLogger(Utility.class); private static final int NEUTRAL_SCORE_ZERO = 0; private static final String DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP = " private static final Pattern PATTERN; static { PATTERN = Pattern.compile(DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP); } private Utility() { } /** * Verify that list of documents are not null or empty. Otherwise, throw exception. * * @param documents A list of documents. * * @throws NullPointerException if {@code documents} is null. * @throws IllegalArgumentException if {@code documents} is empty. */ public static void inputDocumentsValidation(Iterable<?> documents) { Objects.requireNonNull(documents, "'documents' cannot be null."); final Iterator<?> iterator = documents.iterator(); if (!iterator.hasNext()) { throw new IllegalArgumentException("'documents' cannot be empty."); } } /** * Mapping a {@link ErrorResponseException} to {@link HttpResponseException} if exist. Otherwise, return * original {@link Throwable}. * * @param throwable A {@link Throwable}. * @return A {@link HttpResponseException} or the original throwable type. */ public static Throwable mapToHttpResponseExceptionIfExists(Throwable throwable) { if (throwable instanceof ErrorResponseException) { ErrorResponseException errorException = (ErrorResponseException) throwable; final ErrorResponse errorResponse = errorException.getValue(); com.azure.ai.textanalytics.models.TextAnalyticsError textAnalyticsError = null; if (errorResponse != null && errorResponse.getError() != null) { textAnalyticsError = toTextAnalyticsError(errorResponse.getError()); } return new HttpResponseException(errorException.getMessage(), errorException.getResponse(), textAnalyticsError); } return throwable; } /** * Given a list of documents will apply the indexing function to it and return the updated list. 
* * @param documents the inputs to apply the mapping function to. * @param mappingFunction the function which applies the index to the incoming input value. * @param <T> the type of items being returned in the list. * @return The list holding all the generic items combined. */ public static <T> List<T> mapByIndex(Iterable<String> documents, BiFunction<String, String, T> mappingFunction) { Objects.requireNonNull(documents, "'documents' cannot be null."); AtomicInteger i = new AtomicInteger(0); List<T> result = new ArrayList<>(); documents.forEach(document -> result.add(mappingFunction.apply(String.valueOf(i.getAndIncrement()), document)) ); return result; } /** * Convert {@link DocumentStatistics} to {@link TextDocumentStatistics} * * @param statistics the {@link DocumentStatistics} provided by the service. * @return the {@link TextDocumentStatistics} returned by the SDK. */ public static TextDocumentStatistics toTextDocumentStatistics(DocumentStatistics statistics) { return new TextDocumentStatistics(statistics.getCharactersCount(), statistics.getTransactionsCount()); } /** * Convert {@link RequestStatistics} to {@link TextDocumentBatchStatistics} * * @param statistics the {@link RequestStatistics} provided by the service. * @return the {@link TextDocumentBatchStatistics} returned by the SDK. */ public static TextDocumentBatchStatistics toBatchStatistics(RequestStatistics statistics) { return new TextDocumentBatchStatistics(statistics.getDocumentsCount(), statistics.getValidDocumentsCount(), statistics.getErroneousDocumentsCount(), statistics.getTransactionsCount()); } /** * Convert {@link Error} to {@link com.azure.ai.textanalytics.models.TextAnalyticsError} * This function maps the service returned {@link Error inner error} to the top level * {@link com.azure.ai.textanalytics.models.TextAnalyticsError error}, if inner error present. * * @param error the {@link Error} returned by the service. 
* @return the {@link com.azure.ai.textanalytics.models.TextAnalyticsError} returned by the SDK. */ public static TextAnalyticsError toTextAnalyticsError(Error error) { final InnerErrorModel innerError = error.getInnererror(); if (innerError == null) { final ErrorCode errorCode = error.getCode(); return new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString(errorCode == null ? null : errorCode.toString()), error.getMessage(), error.getTarget()); } final InnerErrorCode innerErrorCodeValue = innerError.getCode(); return new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString(innerErrorCodeValue == null ? null : innerErrorCodeValue.toString()), innerError.getMessage(), innerError.getTarget()); } public static TextAnalyticsWarning toTextAnalyticsWarning( DocumentWarning warning) { final WarningCodeValue warningCodeValue = warning.getCode(); return new TextAnalyticsWarning( WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()), warning.getMessage()); } /** * Convert the incoming input {@link TextDocumentInput} to the service expected {@link MultiLanguageInput}. * * @param documents the user provided input in {@link TextDocumentInput} * @return the service required input {@link MultiLanguageInput} */ public static List<MultiLanguageInput> toMultiLanguageInput(Iterable<TextDocumentInput> documents) { List<MultiLanguageInput> multiLanguageInputs = new ArrayList<>(); for (TextDocumentInput textDocumentInput : documents) { multiLanguageInputs.add(new MultiLanguageInput().setId(textDocumentInput.getId()) .setText(textDocumentInput.getText()).setLanguage(textDocumentInput.getLanguage())); } return multiLanguageInputs; } /** * Convert the incoming input {@link com.azure.ai.textanalytics.models.TextAnalyticsError} * to a {@link TextAnalyticsException}. * * @param error the {@link com.azure.ai.textanalytics.models.TextAnalyticsError}. 
* @return the {@link TextAnalyticsException} to be thrown. */ public static TextAnalyticsException toTextAnalyticsException( com.azure.ai.textanalytics.models.TextAnalyticsError error) { return new TextAnalyticsException(error.getMessage(), error.getErrorCode(), error.getTarget()); } /** * Convert to a list of {@link LanguageInput} from {@link DetectLanguageInput}. * * @param documents The list of documents to detect languages for. * * @return a list of {@link LanguageInput}. */ public static List<LanguageInput> toLanguageInput(Iterable<DetectLanguageInput> documents) { final List<LanguageInput> multiLanguageInputs = new ArrayList<>(); documents.forEach(textDocumentInput -> multiLanguageInputs.add(new LanguageInput() .setId(textDocumentInput.getId()) .setText(textDocumentInput.getText()) .setCountryHint(textDocumentInput.getCountryHint()))); return multiLanguageInputs; } /** * Extracts the operation ID from the 'operation-location' URL. An example of 'operation-location' is * https: * * @param operationLocation The URL specified in the 'Operation-Location' response header containing the * operation ID used to track the progress and obtain the ID of the analyze operation. * * @return The operation ID that tracks the long running operation progress. */ /** * Extract the next pagination link which contains the request parameter values, into map, * such as '$skip=20' and '$top=2'. * * @param nextLink the next pagination link. * * @return A map that holds the request parameter value of next pagination link. 
*/ public static Map<String, Object> parseNextLink(String nextLink) { if (!CoreUtils.isNullOrEmpty(nextLink)) { final Map<String, Object> parameterMap = new HashMap<>(); final String[] strings = nextLink.split("\\?", 2); final String[] parameters = strings[1].split("&"); for (String parameter : parameters) { final String[] parameterPair = parameter.split("="); final String key = parameterPair[0]; final String value = parameterPair[1]; if ("showStats".equals(key)) { parameterMap.put(key, value); } else if ("$skip".equals(key) || "$top".equals(key)) { parameterMap.put(key, Integer.valueOf(value)); } } return parameterMap; } return new HashMap<>(); } public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse( Response<SentimentResponse> response) { return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(response.getValue())); } public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse2( Response<AnalyzeTextTaskResult> response) { return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(((SentimentTaskResult) response.getValue()).getResults())); } public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse( Response<LanguageResult> response) { final LanguageResult languageResult = response.getValue(); final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (DocumentLanguage documentLanguage : languageResult.getDocuments()) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage = documentLanguage.getDetectedLanguage(); final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings().stream() .map(warning -> toTextAnalyticsWarning(warning)) .collect(Collectors.toList()); detectLanguageResults.add(new DetectLanguageResult( documentLanguage.getId(), documentLanguage.getStatistics() == null ? 
null : toTextDocumentStatistics(documentLanguage.getStatistics()), null, new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings)))); } for (DocumentError documentError : languageResult.getErrors()) { detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? null : toBatchStatistics(languageResult.getStatistics()))); } public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse2( Response<AnalyzeTextTaskResult> response) { final LanguageDetectionResult languageResult = ((LanguageDetectionTaskResult) response.getValue()).getResults(); final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (LanguageDetectionDocumentResult documentLanguage : languageResult.getDocuments()) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage = documentLanguage.getDetectedLanguage(); final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings() .stream() .map(warning -> toTextAnalyticsWarning(warning)) .collect(Collectors.toList()); detectLanguageResults.add(new DetectLanguageResult( documentLanguage.getId(), documentLanguage.getStatistics() == null ? 
null : toTextDocumentStatistics(documentLanguage.getStatistics()), null, new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings) ))); } for (DocumentError documentError : languageResult.getErrors()) { detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? null : toBatchStatistics(languageResult.getStatistics()))); } public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse( final Response<KeyPhraseResult> response) { final KeyPhraseResult keyPhraseResult = response.getValue(); final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? 
null : toBatchStatistics(keyPhraseResult.getStatistics()))); } public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse2( final Response<AnalyzeTextTaskResult> response) { final KeyPhraseResult keyPhraseResult = ((KeyPhraseTaskResult) response.getValue()).getResults(); final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? 
null : toBatchStatistics(keyPhraseResult.getStatistics()))); } public static RecognizeEntitiesResultCollection toRecognizeEntitiesResultCollectionResponse( final EntitiesResult entitiesResult) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); entitiesResult.getDocuments().forEach(documentEntities -> recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentEntities))); for (DocumentError documentError : entitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, entitiesResult.getModelVersion(), entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics())); } public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection( final Response<EntitiesResult> response) { EntitiesResult entitiesResult = response.getValue(); return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection( toRecognizeEntitiesResults(entitiesResult), entitiesResult.getModelVersion(), entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics()))); } public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection2( final Response<AnalyzeTextTaskResult> response) { EntitiesTaskResult entitiesTaskResult = (EntitiesTaskResult) response.getValue(); final EntitiesResult results = entitiesTaskResult.getResults(); return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection( toRecognizeEntitiesResults(results), results.getModelVersion(), results.getStatistics() == null ? 
null : toBatchStatistics(results.getStatistics()))); } public static List<RecognizeEntitiesResult> toRecognizeEntitiesResults(EntitiesResult results) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); results.getDocuments().forEach( documentEntities -> recognizeEntitiesResults.add(new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>( documentEntities.getWarnings().stream() .map(warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))))); for (DocumentError documentError : results.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return recognizeEntitiesResults; } public static RecognizeEntitiesResult toRecognizeEntitiesResult(EntitiesResultDocumentsItem documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static RecognizeEntitiesResult toRecognizeEntitiesResult(CustomEntitiesResultDocumentsItem documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse( final Response<PiiResult> response) { final PiiResult piiEntitiesResult = response.getValue(); return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection( 
toRecognizePiiEntitiesResults(piiEntitiesResult), piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse2( final Response<AnalyzeTextTaskResult> response) { final PiiResult piiEntitiesResult = ((PiiTaskResult) response.getValue()).getResults(); return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection( toRecognizePiiEntitiesResults(piiEntitiesResult), piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } public static List<RecognizePiiEntitiesResult> toRecognizePiiEntitiesResults(PiiResult piiEntitiesResult) { final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map( entity -> { final PiiEntity piiEntity = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity, entity.getText()); PiiEntityPropertiesHelper.setCategory(piiEntity, PiiEntityCategory.fromString(entity.getCategory())); PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory()); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore()); PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset()); PiiEntityPropertiesHelper.setLength(piiEntity, entity.getLength()); return piiEntity; }) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return recognizeEntitiesResults; } public static RecognizeEntitiesResult toRecognizeEntitiesResult(DocumentEntities documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static RecognizePiiEntitiesResultCollection toRecognizePiiEntitiesResultCollection( final PiiResult piiEntitiesResult) { final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity -> { final PiiEntity piiEntity = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity, entity.getText()); PiiEntityPropertiesHelper.setCategory(piiEntity, PiiEntityCategory.fromString(entity.getCategory())); 
PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory()); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore()); PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset()); return piiEntity; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults, piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics())); } public static ExtractKeyPhrasesResultCollection toExtractKeyPhrasesResultCollection( final KeyPhraseResult keyPhraseResult) { final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? 
null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics())); } public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollectionResponse( final Response<EntityLinkingResult> response) { final EntityLinkingResult entityLinkingResult = response.getValue(); return new SimpleResponse<>(response, new RecognizeLinkedEntitiesResultCollection(toRecognizeLinkedEntitiesResultCollection(entityLinkingResult), entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics()))); } public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollection( final Response<AnalyzeTextTaskResult> response) { final EntityLinkingResult entityLinkingResult = ((EntityLinkingTaskResult) response.getValue()).getResults(); return new SimpleResponse<>(response, new RecognizeLinkedEntitiesResultCollection(toRecognizeLinkedEntitiesResultCollection(entityLinkingResult), entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? 
null : toBatchStatistics(entityLinkingResult.getStatistics()))); } public static RecognizeLinkedEntitiesResultCollection toRecognizeLinkedEntitiesResultCollection( final EntityLinkingResult entityLinkingResult) { final List<RecognizeLinkedEntitiesResult> linkedEntitiesResults = entityLinkingResult.getDocuments().stream().map( documentLinkedEntities -> new RecognizeLinkedEntitiesResult( documentLinkedEntities.getId(), documentLinkedEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentLinkedEntities.getStatistics()), null, new LinkedEntityCollection(new IterableStream<>( documentLinkedEntities.getEntities().stream().map( linkedEntity -> { final LinkedEntity entity = new LinkedEntity( linkedEntity.getName(), new IterableStream<>( linkedEntity.getMatches().stream().map( match -> { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch( match.getText(), match.getConfidenceScore()); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, match.getOffset()); LinkedEntityMatchPropertiesHelper.setLength(linkedEntityMatch, match.getLength()); return linkedEntityMatch; }).collect(Collectors.toList())), linkedEntity.getLanguage(), linkedEntity.getId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(entity, linkedEntity.getBingId()); return entity; }).collect(Collectors.toList())), new IterableStream<>(documentLinkedEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))) ).collect(Collectors.toList()); for (DocumentError documentError : entityLinkingResult.getErrors()) { linkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizeLinkedEntitiesResultCollection(linkedEntitiesResults, entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? 
null : toBatchStatistics(entityLinkingResult.getStatistics()));
    }

    /**
     * Helper method to convert {@link SentimentResponse} to {@link AnalyzeSentimentResultCollection}.
     *
     * @param sentimentResponse The {@link SentimentResponse}.
     *
     * @return A {@link AnalyzeSentimentResultCollection}.
     */
    public static AnalyzeSentimentResultCollection toAnalyzeSentimentResultCollection(
        SentimentResponse sentimentResponse) {
        final List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
        final List<SentimentResponseDocumentsItem> documentSentiments = sentimentResponse.getDocuments();
        // Convert each successfully processed document. The full document list is passed along because
        // opinion-mining assessments are referenced by pointer into sibling sentences and are resolved
        // against the whole response (see toSentenceOpinionList / findSentimentAssessment).
        for (SentimentResponseDocumentsItem documentSentiment : documentSentiments) {
            analyzeSentimentResults.add(toAnalyzeSentimentResult(documentSentiment, documentSentiments));
        }
        // Documents the service failed to process surface as per-document error results (null sentiment).
        for (DocumentError documentError : sentimentResponse.getErrors()) {
            analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        // Batch statistics are optional in the service response.
        return new AnalyzeSentimentResultCollection(analyzeSentimentResults, sentimentResponse.getModelVersion(),
            sentimentResponse.getStatistics() == null ?
                null : toBatchStatistics(sentimentResponse.getStatistics()));
    }

    /**
     * Helper method to convert {@link ExtractiveSummarizationResult} to {@link ExtractSummaryResultCollection}.
     *
     * @param extractiveSummarizationResult The {@link ExtractiveSummarizationResult}.
     *
     * @return A {@link ExtractSummaryResultCollection}.
*/ public static ExtractSummaryResultCollection toExtractSummaryResultCollection( ExtractiveSummarizationResult extractiveSummarizationResult) { final List<ExtractSummaryResult> extractSummaryResults = new ArrayList<>(); final List<ExtractiveSummarizationResultDocumentsItem> extractedDocumentSummaries = extractiveSummarizationResult.getDocuments(); for (ExtractiveSummarizationResultDocumentsItem documentSummary : extractedDocumentSummaries) { extractSummaryResults.add(toExtractSummaryResult(documentSummary)); } for (DocumentError documentError : extractiveSummarizationResult.getErrors()) { extractSummaryResults.add(new ExtractSummaryResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } return new ExtractSummaryResultCollection(extractSummaryResults, extractiveSummarizationResult.getModelVersion(), extractiveSummarizationResult.getStatistics() == null ? null : toBatchStatistics(extractiveSummarizationResult.getStatistics())); } /** * Transfer {@link HealthcareResult} into {@link AnalyzeHealthcareEntitiesResultCollection}. * * @param healthcareResult the service side raw data, HealthcareResult. * * @return the client side explored model, AnalyzeHealthcareEntitiesResultCollection. */ public static AnalyzeHealthcareEntitiesResultCollection toAnalyzeHealthcareEntitiesResultCollection( HealthcareResult healthcareResult) { List<AnalyzeHealthcareEntitiesResult> analyzeHealthcareEntitiesResults = new ArrayList<>(); healthcareResult.getDocuments().forEach( documentEntities -> { final AnalyzeHealthcareEntitiesResult analyzeHealthcareEntitiesResult = new AnalyzeHealthcareEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( textAnalyticsWarning -> new TextAnalyticsWarning( Optional.ofNullable(textAnalyticsWarning.getCode()) .map(warningCodeValue -> WarningCode.fromString(warningCodeValue.toString())) .orElse(null), textAnalyticsWarning.getMessage()) ).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setWarnings(analyzeHealthcareEntitiesResult, IterableStream.of(warnings)); final List<HealthcareEntity> healthcareEntities = documentEntities.getEntities().stream().map( entity -> { final HealthcareEntity healthcareEntity = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity, entity.getText()); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity, entity.getName()); if (entity.getCategory() != null) { HealthcareEntityPropertiesHelper.setCategory(healthcareEntity, HealthcareEntityCategory.fromString(entity.getCategory().toString())); } HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity, entity.getConfidenceScore()); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity, entity.getOffset()); HealthcareEntityPropertiesHelper.setLength(healthcareEntity, entity.getLength()); final List<EntityDataSource> entityDataSources = Optional.ofNullable(entity.getLinks()).map( links -> links.stream().map( link -> { final EntityDataSource dataSource = new EntityDataSource(); EntityDataSourcePropertiesHelper.setName(dataSource, link.getDataSource()); EntityDataSourcePropertiesHelper.setEntityId(dataSource, link.getId()); return dataSource; } ).collect(Collectors.toList())) .orElse(new ArrayList<>()); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity, IterableStream.of(entityDataSources)); final HealthcareAssertion assertion = entity.getAssertion(); if (assertion != null) { HealthcareEntityPropertiesHelper.setAssertion(healthcareEntity, 
toHealthcareEntityAssertion(assertion)); } return healthcareEntity; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntities)); final List<HealthcareEntityRelation> healthcareEntityRelations = documentEntities.getRelations().stream().map( healthcareRelation -> { final HealthcareEntityRelation entityRelation = new HealthcareEntityRelation(); final RelationType relationType = healthcareRelation.getRelationType(); if (relationType != null) { HealthcareEntityRelationPropertiesHelper.setRelationType(entityRelation, HealthcareEntityRelationType.fromString(relationType.toString())); } final List<HealthcareEntityRelationRole> relationRoles = healthcareRelation.getEntities().stream().map( relationEntity -> { final HealthcareEntityRelationRole relationRole = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(relationRole, relationEntity.getRole()); HealthcareEntityRelationRolePropertiesHelper.setEntity(relationRole, healthcareEntities.get(getHealthcareEntityIndex(relationEntity.getRef()))); return relationRole; }).collect(Collectors.toList()); HealthcareEntityRelationPropertiesHelper.setRoles(entityRelation, IterableStream.of(relationRoles)); return entityRelation; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntityRelations)); analyzeHealthcareEntitiesResults.add(analyzeHealthcareEntitiesResult); }); healthcareResult.getErrors().forEach(documentError -> analyzeHealthcareEntitiesResults.add(new AnalyzeHealthcareEntitiesResult( documentError.getId(), null, toTextAnalyticsError(documentError.getError()))) ); return new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(analyzeHealthcareEntitiesResults)); } public static HealthcareEntityAssertion toHealthcareEntityAssertion(HealthcareAssertion healthcareAssertion) 
{ final Association association = healthcareAssertion.getAssociation(); final Certainty certainty = healthcareAssertion.getCertainty(); final Conditionality conditionality = healthcareAssertion.getConditionality(); final HealthcareEntityAssertion entityAssertion = new HealthcareEntityAssertion(); if (association != null) { HealthcareEntityAssertionPropertiesHelper.setAssociation(entityAssertion, EntityAssociation.fromString(association.toString())); } if (certainty != null) { HealthcareEntityAssertionPropertiesHelper.setCertainty(entityAssertion, toCertainty(certainty)); } if (conditionality != null) { HealthcareEntityAssertionPropertiesHelper.setConditionality(entityAssertion, toConditionality(conditionality)); } return entityAssertion; } private static EntityCertainty toCertainty(Certainty certainty) { EntityCertainty entityCertainty1 = null; switch (certainty) { case POSITIVE: entityCertainty1 = EntityCertainty.POSITIVE; break; case POSITIVE_POSSIBLE: entityCertainty1 = EntityCertainty.POSITIVE_POSSIBLE; break; case NEUTRAL_POSSIBLE: entityCertainty1 = EntityCertainty.NEUTRAL_POSSIBLE; break; case NEGATIVE_POSSIBLE: entityCertainty1 = EntityCertainty.NEGATIVE_POSSIBLE; break; case NEGATIVE: entityCertainty1 = EntityCertainty.NEGATIVE; break; default: break; } return entityCertainty1; } private static EntityConditionality toConditionality(Conditionality conditionality) { EntityConditionality conditionality1 = null; switch (conditionality) { case HYPOTHETICAL: conditionality1 = EntityConditionality.HYPOTHETICAL; break; case CONDITIONAL: conditionality1 = EntityConditionality.CONDITIONAL; break; default: break; } return conditionality1; } /** * Helper function that parse healthcare entity index from the given entity reference string. * The entity reference format is " * * @param entityReference the given healthcare entity reference string. * * @return the healthcare entity index. 
*/ private static Integer getHealthcareEntityIndex(String entityReference) { if (!CoreUtils.isNullOrEmpty(entityReference)) { int lastIndex = entityReference.lastIndexOf('/'); if (lastIndex != -1) { return Integer.parseInt(entityReference.substring(lastIndex + 1)); } } throw LOGGER.logExceptionAsError( new RuntimeException("Failed to parse healthcare entity index from: " + entityReference)); } /** * Get the non-null {@link Context}. The default value is {@link Context * * @param context It offers a means of passing arbitrary data (key-value pairs) to pipeline policies. * Most applications do not need to pass arbitrary data to the pipeline and can pass Context.NONE or null. * * @return The Context. */ public static Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } /** * Helper function which retrieves the size of an {@link Iterable}. * * @param documents The iterable of documents. * @return Count of documents in the iterable. */ public static int getDocumentCount(Iterable<?> documents) { if (documents instanceof Collection) { return ((Collection<?>) documents).size(); } else { final int[] count = new int[] { 0 }; documents.forEach(ignored -> count[0] += 1); return count[0]; } } /** * Helper function which convert the {@code Iterable<PiiEntityCategory>} to {@code List<PiiCategory>}. * * @param categoriesFilter the iterable of {@link PiiEntityCategory}. * @return the list of {@link PiiCategory}. */ public static List<PiiCategory> toCategoriesFilter(Iterable<PiiEntityCategory> categoriesFilter) { if (categoriesFilter == null) { return null; } final List<PiiCategory> piiCategories = new ArrayList<>(); categoriesFilter.forEach(category -> piiCategories.add(PiiCategory.fromString(category.toString()))); return piiCategories; } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. 
* * @param documentSentiment The {@link SentimentResponseDocumentsItem} returned by the service. * @param documentSentimentList The document sentiment list returned by the service. * * @return The {@link AnalyzeSentimentResult} to be returned by the SDK. */ private static AnalyzeSentimentResult toAnalyzeSentimentResult(SentimentResponseDocumentsItem documentSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getConfidenceScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { final SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getConfidenceScores(); final SentenceSentimentValue sentenceSentimentValue = sentenceSentiment.getSentiment(); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(sentenceSentiment.getText(), TextSentiment.fromString(sentenceSentimentValue == null ? null : sentenceSentimentValue.toString()), new SentimentConfidenceScores(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive())); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, toSentenceOpinionList(sentenceSentiment, documentSentimentList)); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, sentenceSentiment.getOffset()); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, sentenceSentiment.getLength()); return sentenceSentiment1; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSentiment.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final DocumentSentimentValue documentSentimentValue = documentSentiment.getSentiment(); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? 
null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( TextSentiment.fromString(documentSentimentValue == null ? null : documentSentimentValue.toString()), new SentimentConfidenceScores( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), new IterableStream<>(sentenceSentiments), new IterableStream<>(warnings) )); } /* * Transform SentenceSentiment's opinion mining to output that user can use. */ private static IterableStream<SentenceOpinion> toSentenceOpinionList( com.azure.ai.textanalytics.implementation.models.SentenceSentiment sentenceSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final List<SentenceTarget> sentenceTargets = sentenceSentiment.getTargets(); if (sentenceTargets == null) { return null; } final List<SentenceOpinion> sentenceOpinions = new ArrayList<>(); sentenceTargets.forEach(sentenceTarget -> { final List<AssessmentSentiment> assessmentSentiments = new ArrayList<>(); sentenceTarget.getRelations().forEach(targetRelation -> { final TargetRelationType targetRelationType = targetRelation.getRelationType(); final String opinionPointer = targetRelation.getRef(); if (TargetRelationType.ASSESSMENT == targetRelationType) { assessmentSentiments.add(toAssessmentSentiment( findSentimentAssessment(opinionPointer, documentSentimentList))); } }); final TargetSentiment targetSentiment = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment, sentenceTarget.getText()); TargetSentimentPropertiesHelper.setSentiment(targetSentiment, TextSentiment.fromString(sentenceTarget.getSentiment().toString())); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment, toSentimentConfidenceScores(sentenceTarget.getConfidenceScores())); TargetSentimentPropertiesHelper.setOffset(targetSentiment, sentenceTarget.getOffset()); TargetSentimentPropertiesHelper.setLength(targetSentiment, 
sentenceTarget.getLength());
            // Wrap the target and its resolved assessments into a single sentence-level opinion.
            final SentenceOpinion sentenceOpinion = new SentenceOpinion();
            SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion, targetSentiment);
            SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion,
                new IterableStream<>(assessmentSentiments));
            sentenceOpinions.add(sentenceOpinion);
        });
        return new IterableStream<>(sentenceOpinions);
    }

    /*
     * Transform type TargetConfidenceScoreLabel to SentimentConfidenceScores.
     * Targets carry only negative/positive confidences; the neutral score is fixed at NEUTRAL_SCORE_ZERO.
     */
    private static SentimentConfidenceScores toSentimentConfidenceScores(
        TargetConfidenceScoreLabel targetConfidenceScoreLabel) {
        return new SentimentConfidenceScores(targetConfidenceScoreLabel.getNegative(), NEUTRAL_SCORE_ZERO,
            targetConfidenceScoreLabel.getPositive());
    }

    /*
     * Transform type SentenceAssessment to AssessmentSentiment, copying text, sentiment label,
     * confidence scores, negation flag, and the offset/length span.
     */
    private static AssessmentSentiment toAssessmentSentiment(SentenceAssessment sentenceAssessment) {
        final AssessmentSentiment assessmentSentiment = new AssessmentSentiment();
        AssessmentSentimentPropertiesHelper.setText(assessmentSentiment, sentenceAssessment.getText());
        AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment,
            TextSentiment.fromString(sentenceAssessment.getSentiment().toString()));
        AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment,
            toSentimentConfidenceScores(sentenceAssessment.getConfidenceScores()));
        AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment, sentenceAssessment.isNegated());
        AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment, sentenceAssessment.getOffset());
        AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment, sentenceAssessment.getLength());
        return assessmentSentiment;
    }

    /*
     * Converts one extractive-summarization service document into the public ExtractSummaryResult,
     * copying each extracted sentence (text, rank score, offset, length) plus any document warnings.
     */
    private static ExtractSummaryResult toExtractSummaryResult(
        ExtractiveSummarizationResultDocumentsItem documentSummary) {
        final List<ExtractedSummarySentence> sentences = documentSummary.getSentences();
        final List<SummarySentence> summarySentences = sentences.stream().map(sentence -> {
            final SummarySentence summarySentence = new SummarySentence();
            SummarySentencePropertiesHelper.setText(summarySentence, sentence.getText());
            SummarySentencePropertiesHelper.setRankScore(summarySentence, sentence.getRankScore());
            SummarySentencePropertiesHelper.setLength(summarySentence, sentence.getLength());
            SummarySentencePropertiesHelper.setOffset(summarySentence, sentence.getOffset());
            return summarySentence;
        }).collect(Collectors.toList());
        final List<TextAnalyticsWarning> warnings = documentSummary.getWarnings().stream().map(
            warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList());
        final SummarySentenceCollection summarySentenceCollection = new SummarySentenceCollection(
            new IterableStream<>(summarySentences), new IterableStream<>(warnings)
        );
        // Per-document statistics are optional in the service response.
        final ExtractSummaryResult extractSummaryResult = new ExtractSummaryResult(documentSummary.getId(),
            documentSummary.getStatistics() == null ?
                null : toTextDocumentStatistics(documentSummary.getStatistics()), null
        );
        ExtractSummaryResultPropertiesHelper.setSentences(extractSummaryResult, summarySentenceCollection);
        return extractSummaryResult;
    }

    /**
     * Helper method to convert {@link CustomEntitiesResult} to {@link RecognizeCustomEntitiesResultCollection}.
     *
     * @param customEntitiesResult The {@link CustomEntitiesResult}.
     *
     * @return A {@link RecognizeCustomEntitiesResultCollection}.
*/
    public static RecognizeCustomEntitiesResultCollection toRecognizeCustomEntitiesResultCollection(
        CustomEntitiesResult customEntitiesResult) {
        final List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
        final List<CustomEntitiesResultDocumentsItem> customEntitiesResultDocuments =
            customEntitiesResult.getDocuments();
        // Successful documents first...
        for (CustomEntitiesResultDocumentsItem documentSummary : customEntitiesResultDocuments) {
            recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentSummary));
        }
        // ...then per-document service errors.
        for (DocumentError documentError : customEntitiesResult.getErrors()) {
            recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        final RecognizeCustomEntitiesResultCollection resultCollection =
            new RecognizeCustomEntitiesResultCollection(recognizeEntitiesResults);
        // Project/deployment names identify the custom model that produced these results.
        RecognizeCustomEntitiesResultCollectionPropertiesHelper.setProjectName(resultCollection,
            customEntitiesResult.getProjectName());
        RecognizeCustomEntitiesResultCollectionPropertiesHelper.setDeploymentName(resultCollection,
            customEntitiesResult.getDeploymentName());
        if (customEntitiesResult.getStatistics() != null) {
            RecognizeCustomEntitiesResultCollectionPropertiesHelper.setStatistics(resultCollection,
                toBatchStatistics(customEntitiesResult.getStatistics()));
        }
        return resultCollection;
    }

    /**
     * Helper method to convert {@link CustomSingleLabelClassificationResult} to
     * {@link SingleCategoryClassifyResultCollection}.
     *
     * @param customSingleClassificationResult The {@link CustomSingleLabelClassificationResult}.
     *
     * @return A {@link SingleCategoryClassifyResultCollection}.
     */
    public static SingleCategoryClassifyResultCollection toSingleCategoryClassifyResultCollection(
        CustomSingleLabelClassificationResult customSingleClassificationResult) {
        final List<SingleCategoryClassifyResult> singleCategoryClassifyResults = new ArrayList<>();
        final List<CustomSingleLabelClassificationResultDocumentsItem> singleClassificationDocuments =
            customSingleClassificationResult.getDocuments();
        // Successful documents first...
        for (CustomSingleLabelClassificationResultDocumentsItem documentSummary : singleClassificationDocuments) {
            singleCategoryClassifyResults.add(toSingleCategoryClassifyResult(documentSummary));
        }
        // ...then per-document service errors.
        for (DocumentError documentError : customSingleClassificationResult.getErrors()) {
            singleCategoryClassifyResults.add(new SingleCategoryClassifyResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError())));
        }
        final SingleCategoryClassifyResultCollection resultCollection =
            new SingleCategoryClassifyResultCollection(singleCategoryClassifyResults);
        SingleCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection,
            customSingleClassificationResult.getProjectName());
        SingleCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection,
            customSingleClassificationResult.getDeploymentName());
        if (customSingleClassificationResult.getStatistics() != null) {
            SingleCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection,
                toBatchStatistics(customSingleClassificationResult.getStatistics()));
        }
        return resultCollection;
    }

    /*
     * Converts one single-label classification service document into the public
     * SingleCategoryClassifyResult (classification, warnings, optional statistics).
     */
    private static SingleCategoryClassifyResult toSingleCategoryClassifyResult(
        CustomSingleLabelClassificationResultDocumentsItem singleClassificationDocument) {
        final ClassificationResult classificationResult = singleClassificationDocument.getClassProperty();
        final List<TextAnalyticsWarning> warnings = singleClassificationDocument.getWarnings().stream().map(
            warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList());
        final SingleCategoryClassifyResult singleCategoryClassifyResult = new SingleCategoryClassifyResult(
            singleClassificationDocument.getId(),
            singleClassificationDocument.getStatistics() == null ?
                null : toTextDocumentStatistics(singleClassificationDocument.getStatistics()), null);
        SingleCategoryClassifyResultPropertiesHelper.setClassification(singleCategoryClassifyResult,
            toDocumentClassification(classificationResult));
        SingleCategoryClassifyResultPropertiesHelper.setWarnings(singleCategoryClassifyResult,
            new IterableStream<>(warnings));
        return singleCategoryClassifyResult;
    }

    /*
     * Copies category name and confidence score from the service ClassificationResult into the
     * public ClassificationCategory.
     */
    private static ClassificationCategory toDocumentClassification(ClassificationResult classificationResult) {
        final ClassificationCategory classificationCategory = new ClassificationCategory();
        ClassificationCategoryPropertiesHelper.setCategory(classificationCategory,
            classificationResult.getCategory());
        ClassificationCategoryPropertiesHelper.setConfidenceScore(classificationCategory,
            classificationResult.getConfidenceScore());
        return classificationCategory;
    }

    /**
     * Helper method to convert {@link CustomMultiLabelClassificationResult} to
     * {@link MultiCategoryClassifyResultCollection}.
     *
     * @param customMultiClassificationResult The {@link CustomMultiLabelClassificationResult}.
     *
     * @return A {@link MultiCategoryClassifyResultCollection}.
*/
    public static MultiCategoryClassifyResultCollection toMultiCategoryClassifyResultCollection(
        CustomMultiLabelClassificationResult customMultiClassificationResult) {
        final List<MultiCategoryClassifyResult> multiCategoryClassifyResults = new ArrayList<>();
        final List<CustomMultiLabelClassificationResultDocumentsItem> multiClassificationDocuments =
            customMultiClassificationResult.getDocuments();
        // Map each successfully classified document into the public result model.
        for (CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument
            : multiClassificationDocuments) {
            multiCategoryClassifyResults.add(toMultiCategoryClassifyResult(multiClassificationDocument));
        }
        // Failed documents surface as per-document error results.
        for (DocumentError documentError : customMultiClassificationResult.getErrors()) {
            multiCategoryClassifyResults.add(new MultiCategoryClassifyResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError())));
        }
        final MultiCategoryClassifyResultCollection resultCollection =
            new MultiCategoryClassifyResultCollection(multiCategoryClassifyResults);
        // Project/deployment names and batch statistics are applied via the properties helper (no public setters).
        MultiCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection,
            customMultiClassificationResult.getProjectName());
        MultiCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection,
            customMultiClassificationResult.getDeploymentName());
        if (customMultiClassificationResult.getStatistics() != null) {
            MultiCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection,
                toBatchStatistics(customMultiClassificationResult.getStatistics()));
        }
        return resultCollection;
    }

    /**
     * Converts one service-side multi-label classification document into the public
     * {@link MultiCategoryClassifyResult}, attaching its classification categories and warnings.
     *
     * @param multiClassificationDocument The service-side per-document classification item.
     *
     * @return The public per-document result.
     */
    private static MultiCategoryClassifyResult toMultiCategoryClassifyResult(
        CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument) {
        final List<ClassificationCategory> classificationCategories = multiClassificationDocument
            .getClassProperty()
            .stream()
            .map(classificationResult -> toDocumentClassification(classificationResult))
            .collect(Collectors.toList());
        final List<TextAnalyticsWarning> warnings = multiClassificationDocument.getWarnings().stream().map(
            warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList());
        // NOTE(review): local is named 'classifySingleCategoryResult' but holds a MultiCategoryClassifyResult —
        // misleading name, left unchanged here.
        final MultiCategoryClassifyResult classifySingleCategoryResult = new MultiCategoryClassifyResult(
            multiClassificationDocument.getId(),
            multiClassificationDocument.getStatistics() == null
                ? null : toTextDocumentStatistics(multiClassificationDocument.getStatistics()),
            null);
        final ClassificationCategoryCollection classifications = new ClassificationCategoryCollection(
            new IterableStream<>(classificationCategories));
        ClassificationCategoryCollectionPropertiesHelper.setWarnings(classifications,
            new IterableStream<>(warnings));
        MultiCategoryClassifyResultPropertiesHelper.setClassifications(classifySingleCategoryResult,
            classifications);
        return classifySingleCategoryResult;
    }

    /*
     * Parses the assessment reference pointer (e.g. a "#/documents/i/sentences/j/assessments/k" style string)
     * into an index array of [documentIndex, sentenceIndex, assessmentIndex].
     * Throws IllegalStateException (logged) when the pointer does not match the expected PATTERN.
     */
    public static int[] parseRefPointerToIndexArray(String assessmentPointer) {
        final Matcher matcher = PATTERN.matcher(assessmentPointer);
        final boolean isMatched = matcher.find();

        // The pointer carries exactly three numeric groups: document, sentence, assessment.
        final int[] result = new int[3];

        if (isMatched) {
            result[0] = Integer.parseInt(matcher.group(1));
            result[1] = Integer.parseInt(matcher.group(2));
            result[2] = Integer.parseInt(matcher.group(3));
        } else {
            throw LOGGER.logExceptionAsError(new IllegalStateException(
                String.format("'%s' is not a valid assessment pointer.", assessmentPointer)));
        }
        return result;
    }

    /*
     * Find the specific sentence assessment in the document sentiment list by given the assessment reference
     * pointer. Each index parsed from the pointer is bounds-checked; an out-of-range index raises a logged
     * IllegalStateException.
     */
    public static SentenceAssessment findSentimentAssessment(String assessmentPointer,
        List<SentimentResponseDocumentsItem> documentSentiments) {
        final int[] assessmentIndexes = parseRefPointerToIndexArray(assessmentPointer);
        final int documentIndex = assessmentIndexes[0];
        final int sentenceIndex = assessmentIndexes[1];
        final int assessmentIndex = assessmentIndexes[2];
        if (documentIndex >= documentSentiments.size()) {
            throw LOGGER.logExceptionAsError(new IllegalStateException(
                String.format("Invalid document index '%s' in '%s'.", documentIndex, assessmentPointer)));
        }
        final SentimentResponseDocumentsItem documentsentiment = documentSentiments.get(documentIndex);
        final List<com.azure.ai.textanalytics.implementation.models.SentenceSentiment> sentenceSentiments =
            documentsentiment.getSentences();
        if (sentenceIndex >= sentenceSentiments.size()) {
            throw LOGGER.logExceptionAsError(new IllegalStateException(
                String.format("Invalid sentence index '%s' in '%s'.", sentenceIndex, assessmentPointer)));
        }
        final List<SentenceAssessment> assessments = sentenceSentiments.get(sentenceIndex).getAssessments();
        if (assessmentIndex >= assessments.size()) {
            throw LOGGER.logExceptionAsError(new IllegalStateException(
                String.format("Invalid assessment index '%s' in '%s'.", assessmentIndex, assessmentPointer)));
        }
        return assessments.get(assessmentIndex);
    }
}
class Utility { public static final Duration DEFAULT_POLL_INTERVAL = Duration.ofSeconds(30); private static final ClientLogger LOGGER = new ClientLogger(Utility.class); private static final int NEUTRAL_SCORE_ZERO = 0; private static final int OPERATION_ID_LENGTH = 37; private static final String DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP = " private static final Pattern PATTERN; static { PATTERN = Pattern.compile(DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP); } private Utility() { } /** * Verify that list of documents are not null or empty. Otherwise, throw exception. * * @param documents A list of documents. * * @throws NullPointerException if {@code documents} is null. * @throws IllegalArgumentException if {@code documents} is empty. */ public static void inputDocumentsValidation(Iterable<?> documents) { Objects.requireNonNull(documents, "'documents' cannot be null."); final Iterator<?> iterator = documents.iterator(); if (!iterator.hasNext()) { throw new IllegalArgumentException("'documents' cannot be empty."); } } /** * Mapping a {@link ErrorResponseException} to {@link HttpResponseException} if exist. Otherwise, return * original {@link Throwable}. * * @param throwable A {@link Throwable}. * @return A {@link HttpResponseException} or the original throwable type. 
*/ public static Throwable mapToHttpResponseExceptionIfExists(Throwable throwable) { if (throwable instanceof ErrorResponseException) { ErrorResponseException errorException = (ErrorResponseException) throwable; final ErrorResponse errorResponse = errorException.getValue(); com.azure.ai.textanalytics.models.TextAnalyticsError textAnalyticsError = null; if (errorResponse != null && errorResponse.getError() != null) { textAnalyticsError = toTextAnalyticsError(errorResponse.getError()); } return new HttpResponseException(errorException.getMessage(), errorException.getResponse(), textAnalyticsError); } return throwable; } /** * Given a list of documents will apply the indexing function to it and return the updated list. * * @param documents the inputs to apply the mapping function to. * @param mappingFunction the function which applies the index to the incoming input value. * @param <T> the type of items being returned in the list. * @return The list holding all the generic items combined. */ public static <T> List<T> mapByIndex(Iterable<String> documents, BiFunction<String, String, T> mappingFunction) { Objects.requireNonNull(documents, "'documents' cannot be null."); AtomicInteger i = new AtomicInteger(0); List<T> result = new ArrayList<>(); documents.forEach(document -> result.add(mappingFunction.apply(String.valueOf(i.getAndIncrement()), document)) ); return result; } /** * Convert {@link DocumentStatistics} to {@link TextDocumentStatistics} * * @param statistics the {@link DocumentStatistics} provided by the service. * @return the {@link TextDocumentStatistics} returned by the SDK. */ public static TextDocumentStatistics toTextDocumentStatistics(DocumentStatistics statistics) { return new TextDocumentStatistics(statistics.getCharactersCount(), statistics.getTransactionsCount()); } /** * Convert {@link RequestStatistics} to {@link TextDocumentBatchStatistics} * * @param statistics the {@link RequestStatistics} provided by the service. 
 * @return the {@link TextDocumentBatchStatistics} returned by the SDK.
 */
    public static TextDocumentBatchStatistics toBatchStatistics(RequestStatistics statistics) {
        return new TextDocumentBatchStatistics(statistics.getDocumentsCount(), statistics.getValidDocumentsCount(),
            statistics.getErroneousDocumentsCount(), statistics.getTransactionsCount());
    }

    /**
     * Convert {@link Error} to {@link com.azure.ai.textanalytics.models.TextAnalyticsError}
     * This function maps the service returned {@link Error inner error} to the top level
     * {@link com.azure.ai.textanalytics.models.TextAnalyticsError error}, if inner error present.
     *
     * @param error the {@link Error} returned by the service.
     * @return the {@link com.azure.ai.textanalytics.models.TextAnalyticsError} returned by the SDK.
     */
    public static TextAnalyticsError toTextAnalyticsError(Error error) {
        final InnerErrorModel innerError = error.getInnererror();
        // No inner error: map the top-level code/message/target directly.
        if (innerError == null) {
            final ErrorCode errorCode = error.getCode();
            return new com.azure.ai.textanalytics.models.TextAnalyticsError(
                TextAnalyticsErrorCode.fromString(errorCode == null ? null : errorCode.toString()),
                error.getMessage(), error.getTarget());
        }
        // Inner error present: it takes precedence over the top-level error.
        final InnerErrorCode innerErrorCodeValue = innerError.getCode();
        return new com.azure.ai.textanalytics.models.TextAnalyticsError(
            TextAnalyticsErrorCode.fromString(innerErrorCodeValue == null ? null : innerErrorCodeValue.toString()),
            innerError.getMessage(), innerError.getTarget());
    }

    /**
     * Converts a service-side {@link DocumentWarning} to the public {@link TextAnalyticsWarning}.
     *
     * @param warning the service-side warning.
     * @return the public warning model.
     */
    public static TextAnalyticsWarning toTextAnalyticsWarning(DocumentWarning warning) {
        final WarningCodeValue warningCodeValue = warning.getCode();
        return new TextAnalyticsWarning(
            WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()),
            warning.getMessage());
    }

    /**
     * Convert the incoming input {@link TextDocumentInput} to the service expected {@link MultiLanguageInput}.
     *
     * @param documents the user provided input in {@link TextDocumentInput}
     * @return the service required input {@link MultiLanguageInput}
     */
    public static List<MultiLanguageInput> toMultiLanguageInput(Iterable<TextDocumentInput> documents) {
        List<MultiLanguageInput> multiLanguageInputs = new ArrayList<>();
        for (TextDocumentInput textDocumentInput : documents) {
            multiLanguageInputs.add(new MultiLanguageInput().setId(textDocumentInput.getId())
                .setText(textDocumentInput.getText()).setLanguage(textDocumentInput.getLanguage()));
        }
        return multiLanguageInputs;
    }

    /**
     * Convert the incoming input {@link com.azure.ai.textanalytics.models.TextAnalyticsError}
     * to a {@link TextAnalyticsException}.
     *
     * @param error the {@link com.azure.ai.textanalytics.models.TextAnalyticsError}.
     * @return the {@link TextAnalyticsException} to be thrown.
     */
    public static TextAnalyticsException toTextAnalyticsException(
        com.azure.ai.textanalytics.models.TextAnalyticsError error) {
        return new TextAnalyticsException(error.getMessage(), error.getErrorCode(), error.getTarget());
    }

    /**
     * Convert to a list of {@link LanguageInput} from {@link DetectLanguageInput}.
     *
     * @param documents The list of documents to detect languages for.
     *
     * @return a list of {@link LanguageInput}.
     */
    public static List<LanguageInput> toLanguageInput(Iterable<DetectLanguageInput> documents) {
        final List<LanguageInput> multiLanguageInputs = new ArrayList<>();
        documents.forEach(textDocumentInput -> multiLanguageInputs.add(new LanguageInput()
            .setId(textDocumentInput.getId())
            .setText(textDocumentInput.getText())
            .setCountryHint(textDocumentInput.getCountryHint())));
        return multiLanguageInputs;
    }

    /**
     * Extracts the operation ID from the 'operation-location' URL. An example of 'operation-location' is
     * {@code https://<endpoint>/text/analytics/v3.1/analyze/jobs/<operationId>}
     * (NOTE(review): the example URL was truncated in the reviewed source; confirm against the service docs).
     *
     * @param operationLocation The URL specified in the 'Operation-Location' response header containing the
     * operation ID used to track the progress and obtain the ID of the analyze operation.
 *
 * @return The operation ID that tracks the long running operation progress.
 */
    // NOTE(review): the javadoc immediately above has no method attached in this chunk — the method it
    // documents appears to have been lost or lives elsewhere; verify against the full file.

    /**
     * Extract the next pagination link which contains the request parameter values, into map,
     * such as '$skip=20' and '$top=2'.
     *
     * @param nextLink the next pagination link.
     *
     * @return A map that holds the request parameter value of next pagination link.
     */
    public static Map<String, Object> parseNextLink(String nextLink) {
        if (!CoreUtils.isNullOrEmpty(nextLink)) {
            final Map<String, Object> parameterMap = new HashMap<>();
            // Everything after the first '?' is the query string.
            final String[] strings = nextLink.split("\\?", 2);
            final String[] parameters = strings[1].split("&");
            for (String parameter : parameters) {
                final String[] parameterPair = parameter.split("=");
                final String key = parameterPair[0];
                final String value = parameterPair[1];
                if ("showStats".equals(key)) {
                    parameterMap.put(key, value);
                } else if ("$skip".equals(key) || "$top".equals(key)) {
                    parameterMap.put(key, Integer.valueOf(value));
                } else if ("skip".equals(key) || "top".equals(key)) {
                    // Normalize the un-prefixed form to the '$'-prefixed parameter name.
                    parameterMap.put("$" + key, Integer.valueOf(value));
                }
            }
            return parameterMap;
        }
        return new HashMap<>();
    }

    // Wraps an AnalyzeSentiment service response into a Response of the public collection model.
    public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse(
        Response<SentimentResponse> response) {
        return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(response.getValue()));
    }

    // Same as above, but for the newer AnalyzeText task-result envelope.
    public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse2(
        Response<AnalyzeTextTaskResult> response) {
        return new SimpleResponse<>(response,
            toAnalyzeSentimentResultCollection(((SentimentTaskResult) response.getValue()).getResults()));
    }

    // Converts a detect-language service response (legacy envelope) into the public collection model.
    public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse(
        Response<LanguageResult> response) {
        final LanguageResult languageResult = response.getValue();
        final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>();
        for (DocumentLanguage documentLanguage : languageResult.getDocuments()) {
            com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage =
                documentLanguage.getDetectedLanguage();
            final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings().stream()
                .map(warning -> toTextAnalyticsWarning(warning))
                .collect(Collectors.toList());
            detectLanguageResults.add(new DetectLanguageResult(
                documentLanguage.getId(),
                documentLanguage.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentLanguage.getStatistics()),
                null,
                new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings))));
        }
        // Failed documents surface as per-document error results.
        for (DocumentError documentError : languageResult.getErrors()) {
            detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new SimpleResponse<>(response,
            new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(),
                languageResult.getStatistics() == null
                    ? null : toBatchStatistics(languageResult.getStatistics())));
    }

    // Converts a detect-language response delivered through the newer AnalyzeText task-result envelope.
    public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse2(
        Response<AnalyzeTextTaskResult> response) {
        final LanguageDetectionResult languageResult =
            ((LanguageDetectionTaskResult) response.getValue()).getResults();
        final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>();
        for (LanguageDetectionDocumentResult documentLanguage : languageResult.getDocuments()) {
            com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage =
                documentLanguage.getDetectedLanguage();
            final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings()
                .stream()
                .map(warning -> toTextAnalyticsWarning(warning))
                .collect(Collectors.toList());
            detectLanguageResults.add(new DetectLanguageResult(
                documentLanguage.getId(),
                documentLanguage.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentLanguage.getStatistics()),
                null,
                new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings)
                )));
        }
        for (DocumentError documentError : languageResult.getErrors()) {
            detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new SimpleResponse<>(response,
            new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(),
                languageResult.getStatistics() == null
                    ? null : toBatchStatistics(languageResult.getStatistics())));
    }

    // Converts a key-phrase extraction response (legacy envelope) into the public collection model.
    public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse(
        final Response<KeyPhraseResult> response) {
        final KeyPhraseResult keyPhraseResult = response.getValue();
        final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>();
        for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) {
            final String documentId = documentKeyPhrases.getId();
            keyPhraseResultList.add(new ExtractKeyPhraseResult(
                documentId,
                documentKeyPhrases.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()),
                null,
                new KeyPhrasesCollection(
                    new IterableStream<>(documentKeyPhrases.getKeyPhrases()),
                    new IterableStream<>(documentKeyPhrases.getWarnings().stream().map(
                        warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))));
        }
        for (DocumentError documentError : keyPhraseResult.getErrors()) {
            keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new SimpleResponse<>(response,
            new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(),
                keyPhraseResult.getStatistics() == null ?
null : toBatchStatistics(keyPhraseResult.getStatistics())));
    }

    // Same conversion as above, but for the newer AnalyzeText task-result envelope.
    public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse2(
        final Response<AnalyzeTextTaskResult> response) {
        final KeyPhraseResult keyPhraseResult = ((KeyPhraseTaskResult) response.getValue()).getResults();
        final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>();
        for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) {
            final String documentId = documentKeyPhrases.getId();
            keyPhraseResultList.add(new ExtractKeyPhraseResult(
                documentId,
                documentKeyPhrases.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()),
                null,
                new KeyPhrasesCollection(
                    new IterableStream<>(documentKeyPhrases.getKeyPhrases()),
                    new IterableStream<>(documentKeyPhrases.getWarnings().stream().map(
                        warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))));
        }
        // Failed documents surface as per-document error results.
        for (DocumentError documentError : keyPhraseResult.getErrors()) {
            keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new SimpleResponse<>(response,
            new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(),
                keyPhraseResult.getStatistics() == null
                    ? null : toBatchStatistics(keyPhraseResult.getStatistics())));
    }

    // Converts a raw EntitiesResult into the public collection model.
    // NOTE(review): named '...Response' but returns the bare collection, not a Response — misleading name.
    public static RecognizeEntitiesResultCollection toRecognizeEntitiesResultCollectionResponse(
        final EntitiesResult entitiesResult) {
        List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
        entitiesResult.getDocuments().forEach(documentEntities ->
            recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentEntities)));
        for (DocumentError documentError : entitiesResult.getErrors()) {
            recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, entitiesResult.getModelVersion(),
            entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics()));
    }

    // Wraps an entity-recognition service response (legacy envelope) into a Response of the public collection.
    public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection(
        final Response<EntitiesResult> response) {
        EntitiesResult entitiesResult = response.getValue();
        return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection(
            toRecognizeEntitiesResults(entitiesResult), entitiesResult.getModelVersion(),
            entitiesResult.getStatistics() == null
                ? null : toBatchStatistics(entitiesResult.getStatistics())));
    }

    // Same as above, but for the newer AnalyzeText task-result envelope.
    public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection2(
        final Response<AnalyzeTextTaskResult> response) {
        EntitiesTaskResult entitiesTaskResult = (EntitiesTaskResult) response.getValue();
        final EntitiesResult results = entitiesTaskResult.getResults();
        return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection(
            toRecognizeEntitiesResults(results), results.getModelVersion(),
            results.getStatistics() == null ?
null : toBatchStatistics(results.getStatistics())));
    }

    // Maps every document (and every document-level error) of an EntitiesResult into public per-document results.
    public static List<RecognizeEntitiesResult> toRecognizeEntitiesResults(EntitiesResult results) {
        List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
        results.getDocuments().forEach(
            documentEntities -> recognizeEntitiesResults.add(new RecognizeEntitiesResult(
                documentEntities.getId(),
                documentEntities.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentEntities.getStatistics()),
                null,
                new CategorizedEntityCollection(
                    new IterableStream<>(documentEntities.getEntities().stream().map(entity -> {
                        final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(),
                            EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(),
                            entity.getConfidenceScore());
                        // Offset/length have no public setters; applied through the properties helper.
                        CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength());
                        CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset());
                        return categorizedEntity;
                    }).collect(Collectors.toList())),
                    new IterableStream<>(
                        documentEntities.getWarnings().stream()
                            .map(warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))));
        for (DocumentError documentError : results.getErrors()) {
            recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return recognizeEntitiesResults;
    }

    // Converts one entity-recognition document item into the public per-document result.
    public static RecognizeEntitiesResult toRecognizeEntitiesResult(EntitiesResultDocumentsItem documentEntities) {
        return new RecognizeEntitiesResult(
            documentEntities.getId(),
            documentEntities.getStatistics() == null
                ? null : toTextDocumentStatistics(documentEntities.getStatistics()),
            null,
            new CategorizedEntityCollection(
                new IterableStream<>(documentEntities.getEntities().stream().map(entity -> {
                    final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(),
                        EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(),
                        entity.getConfidenceScore());
                    CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength());
                    CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset());
                    return categorizedEntity;
                }).collect(Collectors.toList())),
                new IterableStream<>(documentEntities.getWarnings().stream().map(
                    warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))));
    }

    // Overload for custom-entity recognition document items; same mapping as the overload above.
    public static RecognizeEntitiesResult toRecognizeEntitiesResult(
        CustomEntitiesResultDocumentsItem documentEntities) {
        return new RecognizeEntitiesResult(
            documentEntities.getId(),
            documentEntities.getStatistics() == null
                ? null : toTextDocumentStatistics(documentEntities.getStatistics()),
            null,
            new CategorizedEntityCollection(
                new IterableStream<>(documentEntities.getEntities().stream().map(entity -> {
                    final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(),
                        EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(),
                        entity.getConfidenceScore());
                    CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength());
                    CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset());
                    return categorizedEntity;
                }).collect(Collectors.toList())),
                new IterableStream<>(documentEntities.getWarnings().stream().map(
                    warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))));
    }

    // Wraps a PII-recognition service response (legacy envelope) into a Response of the public collection.
    public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse(
        final Response<PiiResult> response) {
        final PiiResult piiEntitiesResult = response.getValue();
        return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection(
toRecognizePiiEntitiesResults(piiEntitiesResult),
            piiEntitiesResult.getModelVersion(),
            piiEntitiesResult.getStatistics() == null
                ? null : toBatchStatistics(piiEntitiesResult.getStatistics())
        ));
    }

    // Same as above, but for the newer AnalyzeText task-result envelope.
    public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse2(
        final Response<AnalyzeTextTaskResult> response) {
        final PiiResult piiEntitiesResult = ((PiiTaskResult) response.getValue()).getResults();
        return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection(
            toRecognizePiiEntitiesResults(piiEntitiesResult),
            piiEntitiesResult.getModelVersion(),
            piiEntitiesResult.getStatistics() == null
                ? null : toBatchStatistics(piiEntitiesResult.getStatistics())
        ));
    }

    // Maps every PII document (and every document-level error) into public per-document results.
    public static List<RecognizePiiEntitiesResult> toRecognizePiiEntitiesResults(PiiResult piiEntitiesResult) {
        final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
        piiEntitiesResult.getDocuments().forEach(documentEntities -> {
            // PiiEntity has no public setters; every property is applied through the properties helper.
            final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(
                entity -> {
                    final PiiEntity piiEntity = new PiiEntity();
                    PiiEntityPropertiesHelper.setText(piiEntity, entity.getText());
                    PiiEntityPropertiesHelper.setCategory(piiEntity,
                        PiiEntityCategory.fromString(entity.getCategory()));
                    PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory());
                    PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore());
                    PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset());
                    PiiEntityPropertiesHelper.setLength(piiEntity, entity.getLength());
                    return piiEntity;
                })
                .collect(Collectors.toList());
            final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map(
                warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList());

            recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(
                documentEntities.getId(),
                documentEntities.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentEntities.getStatistics()),
                null,
                new PiiEntityCollection(new IterableStream<>(piiEntities),
                    documentEntities.getRedactedText(), new IterableStream<>(warnings))
            ));
        });
        for (DocumentError documentError : piiEntitiesResult.getErrors()) {
            recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return recognizeEntitiesResults;
    }

    // Overload converting a DocumentEntities item into the public categorized-entity result.
    public static RecognizeEntitiesResult toRecognizeEntitiesResult(DocumentEntities documentEntities) {
        return new RecognizeEntitiesResult(
            documentEntities.getId(),
            documentEntities.getStatistics() == null
                ? null : toTextDocumentStatistics(documentEntities.getStatistics()),
            null,
            new CategorizedEntityCollection(
                new IterableStream<>(documentEntities.getEntities().stream().map(entity -> {
                    final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(),
                        EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(),
                        entity.getConfidenceScore());
                    CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength());
                    CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset());
                    return categorizedEntity;
                }).collect(Collectors.toList())),
                new IterableStream<>(documentEntities.getWarnings().stream().map(
                    warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))));
    }

    // Converts a raw PiiResult into the public collection model (documents, errors, model version, statistics).
    public static RecognizePiiEntitiesResultCollection toRecognizePiiEntitiesResultCollection(
        final PiiResult piiEntitiesResult) {
        final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
        piiEntitiesResult.getDocuments().forEach(documentEntities -> {
            final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity -> {
                final PiiEntity piiEntity = new PiiEntity();
                PiiEntityPropertiesHelper.setText(piiEntity, entity.getText());
                PiiEntityPropertiesHelper.setCategory(piiEntity,
                    PiiEntityCategory.fromString(entity.getCategory()));
                PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory());
                PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore());
                PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset());
                // FIX: length was never set here, unlike the identical mapping in toRecognizePiiEntitiesResults
                // above, leaving PiiEntity length unpopulated on this code path.
                PiiEntityPropertiesHelper.setLength(piiEntity, entity.getLength());
                return piiEntity;
            }).collect(Collectors.toList());
            final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map(
                warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList());

            recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(
                documentEntities.getId(),
                documentEntities.getStatistics() == null
                    ? null : toTextDocumentStatistics(documentEntities.getStatistics()),
                null,
                new PiiEntityCollection(new IterableStream<>(piiEntities),
                    documentEntities.getRedactedText(), new IterableStream<>(warnings))
            ));
        });
        for (DocumentError documentError : piiEntitiesResult.getErrors()) {
            recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults,
            piiEntitiesResult.getModelVersion(),
            piiEntitiesResult.getStatistics() == null
                ? null : toBatchStatistics(piiEntitiesResult.getStatistics()));
    }

    // Converts a raw KeyPhraseResult into the public collection model.
    public static ExtractKeyPhrasesResultCollection toExtractKeyPhrasesResultCollection(
        final KeyPhraseResult keyPhraseResult) {
        final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>();
        for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) {
            final String documentId = documentKeyPhrases.getId();
            keyPhraseResultList.add(new ExtractKeyPhraseResult(
                documentId,
                documentKeyPhrases.getStatistics() == null ?
null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()),
                null,
                new KeyPhrasesCollection(
                    new IterableStream<>(documentKeyPhrases.getKeyPhrases()),
                    new IterableStream<>(documentKeyPhrases.getWarnings().stream().map(
                        warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))));
        }
        // Failed documents surface as per-document error results.
        for (DocumentError documentError : keyPhraseResult.getErrors()) {
            keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(),
            keyPhraseResult.getStatistics() == null
                ? null : toBatchStatistics(keyPhraseResult.getStatistics()));
    }

    // Wraps an entity-linking service response (legacy envelope) into a Response of the public collection.
    public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollectionResponse(
        final Response<EntityLinkingResult> response) {
        final EntityLinkingResult entityLinkingResult = response.getValue();
        return new SimpleResponse<>(response,
            new RecognizeLinkedEntitiesResultCollection(
                toRecognizeLinkedEntitiesResultCollection(entityLinkingResult),
                entityLinkingResult.getModelVersion(),
                entityLinkingResult.getStatistics() == null
                    ? null : toBatchStatistics(entityLinkingResult.getStatistics())));
    }

    // Same as above, but for the newer AnalyzeText task-result envelope.
    public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollection(
        final Response<AnalyzeTextTaskResult> response) {
        final EntityLinkingResult entityLinkingResult =
            ((EntityLinkingTaskResult) response.getValue()).getResults();
        return new SimpleResponse<>(response,
            new RecognizeLinkedEntitiesResultCollection(
                toRecognizeLinkedEntitiesResultCollection(entityLinkingResult),
                entityLinkingResult.getModelVersion(),
                entityLinkingResult.getStatistics() == null
                    ? null : toBatchStatistics(entityLinkingResult.getStatistics())));
    }

    // Converts a raw EntityLinkingResult into the public collection model: each document's linked entities
    // (with their matches, offsets/lengths and Bing entity-search id) plus per-document errors.
    public static RecognizeLinkedEntitiesResultCollection toRecognizeLinkedEntitiesResultCollection(
        final EntityLinkingResult entityLinkingResult) {
        // NOTE(review): the error loop below mutates the list produced by Collectors.toList(); that collector
        // does not guarantee mutability — works with the current JDK ArrayList but is fragile.
        final List<RecognizeLinkedEntitiesResult> linkedEntitiesResults =
            entityLinkingResult.getDocuments().stream().map(
                documentLinkedEntities -> new RecognizeLinkedEntitiesResult(
                    documentLinkedEntities.getId(),
                    documentLinkedEntities.getStatistics() == null
                        ? null : toTextDocumentStatistics(documentLinkedEntities.getStatistics()),
                    null,
                    new LinkedEntityCollection(new IterableStream<>(
                        documentLinkedEntities.getEntities().stream().map(
                            linkedEntity -> {
                                final LinkedEntity entity = new LinkedEntity(
                                    linkedEntity.getName(),
                                    new IterableStream<>(
                                        linkedEntity.getMatches().stream().map(
                                            match -> {
                                                final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch(
                                                    match.getText(), match.getConfidenceScore());
                                                // Offset/length set via helper (no public setters).
                                                LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch,
                                                    match.getOffset());
                                                LinkedEntityMatchPropertiesHelper.setLength(linkedEntityMatch,
                                                    match.getLength());
                                                return linkedEntityMatch;
                                            }).collect(Collectors.toList())),
                                    linkedEntity.getLanguage(),
                                    linkedEntity.getId(),
                                    linkedEntity.getUrl(),
                                    linkedEntity.getDataSource());
                                LinkedEntityPropertiesHelper.setBingEntitySearchApiId(entity,
                                    linkedEntity.getBingId());
                                return entity;
                            }).collect(Collectors.toList())),
                        new IterableStream<>(documentLinkedEntities.getWarnings().stream().map(
                            warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))
            ).collect(Collectors.toList());
        for (DocumentError documentError : entityLinkingResult.getErrors()) {
            linkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new RecognizeLinkedEntitiesResultCollection(linkedEntitiesResults,
            entityLinkingResult.getModelVersion(),
            entityLinkingResult.getStatistics() == null
                ? null : toBatchStatistics(entityLinkingResult.getStatistics()));
    }

    /**
     * Helper method to convert {@link SentimentResponse} to {@link AnalyzeSentimentResultCollection}.
     *
     * @param sentimentResponse The {@link SentimentResponse}.
     *
     * @return A {@link AnalyzeSentimentResultCollection}.
     */
    public static AnalyzeSentimentResultCollection toAnalyzeSentimentResultCollection(
        SentimentResponse sentimentResponse) {
        final List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
        final List<SentimentResponseDocumentsItem> documentSentiments = sentimentResponse.getDocuments();
        // The full document list is passed along so assessment reference pointers can be resolved.
        for (SentimentResponseDocumentsItem documentSentiment : documentSentiments) {
            analyzeSentimentResults.add(toAnalyzeSentimentResult(documentSentiment, documentSentiments));
        }
        for (DocumentError documentError : sentimentResponse.getErrors()) {
            analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new AnalyzeSentimentResultCollection(analyzeSentimentResults,
            sentimentResponse.getModelVersion(),
            sentimentResponse.getStatistics() == null
                ? null : toBatchStatistics(sentimentResponse.getStatistics()));
    }

    /**
     * Helper method to convert {@link ExtractiveSummarizationResult} to {@link ExtractSummaryResultCollection}.
     *
     * @param extractiveSummarizationResult The {@link ExtractiveSummarizationResult}.
     *
     * @return A {@link ExtractSummaryResultCollection}.
*/ public static ExtractSummaryResultCollection toExtractSummaryResultCollection( ExtractiveSummarizationResult extractiveSummarizationResult) { final List<ExtractSummaryResult> extractSummaryResults = new ArrayList<>(); final List<ExtractiveSummarizationResultDocumentsItem> extractedDocumentSummaries = extractiveSummarizationResult.getDocuments(); for (ExtractiveSummarizationResultDocumentsItem documentSummary : extractedDocumentSummaries) { extractSummaryResults.add(toExtractSummaryResult(documentSummary)); } for (DocumentError documentError : extractiveSummarizationResult.getErrors()) { extractSummaryResults.add(new ExtractSummaryResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } return new ExtractSummaryResultCollection(extractSummaryResults, extractiveSummarizationResult.getModelVersion(), extractiveSummarizationResult.getStatistics() == null ? null : toBatchStatistics(extractiveSummarizationResult.getStatistics())); } /** * Transfer {@link HealthcareResult} into {@link AnalyzeHealthcareEntitiesResultCollection}. * * @param healthcareResult the service side raw data, HealthcareResult. * * @return the client side explored model, AnalyzeHealthcareEntitiesResultCollection. */ public static AnalyzeHealthcareEntitiesResultCollection toAnalyzeHealthcareEntitiesResultCollection( HealthcareResult healthcareResult) { List<AnalyzeHealthcareEntitiesResult> analyzeHealthcareEntitiesResults = new ArrayList<>(); healthcareResult.getDocuments().forEach( documentEntities -> { final AnalyzeHealthcareEntitiesResult analyzeHealthcareEntitiesResult = new AnalyzeHealthcareEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( textAnalyticsWarning -> new TextAnalyticsWarning( Optional.ofNullable(textAnalyticsWarning.getCode()) .map(warningCodeValue -> WarningCode.fromString(warningCodeValue.toString())) .orElse(null), textAnalyticsWarning.getMessage()) ).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setWarnings(analyzeHealthcareEntitiesResult, IterableStream.of(warnings)); final List<HealthcareEntity> healthcareEntities = documentEntities.getEntities().stream().map( entity -> { final HealthcareEntity healthcareEntity = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity, entity.getText()); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity, entity.getName()); if (entity.getCategory() != null) { HealthcareEntityPropertiesHelper.setCategory(healthcareEntity, HealthcareEntityCategory.fromString(entity.getCategory().toString())); } HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity, entity.getConfidenceScore()); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity, entity.getOffset()); HealthcareEntityPropertiesHelper.setLength(healthcareEntity, entity.getLength()); final List<EntityDataSource> entityDataSources = Optional.ofNullable(entity.getLinks()).map( links -> links.stream().map( link -> { final EntityDataSource dataSource = new EntityDataSource(); EntityDataSourcePropertiesHelper.setName(dataSource, link.getDataSource()); EntityDataSourcePropertiesHelper.setEntityId(dataSource, link.getId()); return dataSource; } ).collect(Collectors.toList())) .orElse(new ArrayList<>()); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity, IterableStream.of(entityDataSources)); final HealthcareAssertion assertion = entity.getAssertion(); if (assertion != null) { HealthcareEntityPropertiesHelper.setAssertion(healthcareEntity, 
toHealthcareEntityAssertion(assertion)); } return healthcareEntity; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntities)); final List<HealthcareEntityRelation> healthcareEntityRelations = documentEntities.getRelations().stream().map( healthcareRelation -> { final HealthcareEntityRelation entityRelation = new HealthcareEntityRelation(); final RelationType relationType = healthcareRelation.getRelationType(); if (relationType != null) { HealthcareEntityRelationPropertiesHelper.setRelationType(entityRelation, HealthcareEntityRelationType.fromString(relationType.toString())); } final List<HealthcareEntityRelationRole> relationRoles = healthcareRelation.getEntities().stream().map( relationEntity -> { final HealthcareEntityRelationRole relationRole = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(relationRole, relationEntity.getRole()); HealthcareEntityRelationRolePropertiesHelper.setEntity(relationRole, healthcareEntities.get(getHealthcareEntityIndex(relationEntity.getRef()))); return relationRole; }).collect(Collectors.toList()); HealthcareEntityRelationPropertiesHelper.setRoles(entityRelation, IterableStream.of(relationRoles)); return entityRelation; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntityRelations)); analyzeHealthcareEntitiesResults.add(analyzeHealthcareEntitiesResult); }); healthcareResult.getErrors().forEach(documentError -> analyzeHealthcareEntitiesResults.add(new AnalyzeHealthcareEntitiesResult( documentError.getId(), null, toTextAnalyticsError(documentError.getError()))) ); return new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(analyzeHealthcareEntitiesResults)); } public static HealthcareEntityAssertion toHealthcareEntityAssertion(HealthcareAssertion healthcareAssertion) 
{ final Association association = healthcareAssertion.getAssociation(); final Certainty certainty = healthcareAssertion.getCertainty(); final Conditionality conditionality = healthcareAssertion.getConditionality(); final HealthcareEntityAssertion entityAssertion = new HealthcareEntityAssertion(); if (association != null) { HealthcareEntityAssertionPropertiesHelper.setAssociation(entityAssertion, EntityAssociation.fromString(association.toString())); } if (certainty != null) { HealthcareEntityAssertionPropertiesHelper.setCertainty(entityAssertion, toCertainty(certainty)); } if (conditionality != null) { HealthcareEntityAssertionPropertiesHelper.setConditionality(entityAssertion, toConditionality(conditionality)); } return entityAssertion; } private static EntityCertainty toCertainty(Certainty certainty) { EntityCertainty entityCertainty1 = null; switch (certainty) { case POSITIVE: entityCertainty1 = EntityCertainty.POSITIVE; break; case POSITIVE_POSSIBLE: entityCertainty1 = EntityCertainty.POSITIVE_POSSIBLE; break; case NEUTRAL_POSSIBLE: entityCertainty1 = EntityCertainty.NEUTRAL_POSSIBLE; break; case NEGATIVE_POSSIBLE: entityCertainty1 = EntityCertainty.NEGATIVE_POSSIBLE; break; case NEGATIVE: entityCertainty1 = EntityCertainty.NEGATIVE; break; default: break; } return entityCertainty1; } private static EntityConditionality toConditionality(Conditionality conditionality) { EntityConditionality conditionality1 = null; switch (conditionality) { case HYPOTHETICAL: conditionality1 = EntityConditionality.HYPOTHETICAL; break; case CONDITIONAL: conditionality1 = EntityConditionality.CONDITIONAL; break; default: break; } return conditionality1; } /** * Helper function that parse healthcare entity index from the given entity reference string. * The entity reference format is " * * @param entityReference the given healthcare entity reference string. * * @return the healthcare entity index. 
*/ private static Integer getHealthcareEntityIndex(String entityReference) { if (!CoreUtils.isNullOrEmpty(entityReference)) { int lastIndex = entityReference.lastIndexOf('/'); if (lastIndex != -1) { return Integer.parseInt(entityReference.substring(lastIndex + 1)); } } throw LOGGER.logExceptionAsError( new RuntimeException("Failed to parse healthcare entity index from: " + entityReference)); } /** * Get the non-null {@link Context}. The default value is {@link Context * * @param context It offers a means of passing arbitrary data (key-value pairs) to pipeline policies. * Most applications do not need to pass arbitrary data to the pipeline and can pass Context.NONE or null. * * @return The Context. */ public static Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } /** * Helper function which retrieves the size of an {@link Iterable}. * * @param documents The iterable of documents. * @return Count of documents in the iterable. */ public static int getDocumentCount(Iterable<?> documents) { if (documents instanceof Collection) { return ((Collection<?>) documents).size(); } else { final int[] count = new int[] { 0 }; documents.forEach(ignored -> count[0] += 1); return count[0]; } } /** * Helper function which convert the {@code Iterable<PiiEntityCategory>} to {@code List<PiiCategory>}. * * @param categoriesFilter the iterable of {@link PiiEntityCategory}. * @return the list of {@link PiiCategory}. */ public static List<PiiCategory> toCategoriesFilter(Iterable<PiiEntityCategory> categoriesFilter) { if (categoriesFilter == null) { return null; } final List<PiiCategory> piiCategories = new ArrayList<>(); categoriesFilter.forEach(category -> piiCategories.add(PiiCategory.fromString(category.toString()))); return piiCategories; } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. 
* * @param documentSentiment The {@link SentimentResponseDocumentsItem} returned by the service. * @param documentSentimentList The document sentiment list returned by the service. * * @return The {@link AnalyzeSentimentResult} to be returned by the SDK. */ private static AnalyzeSentimentResult toAnalyzeSentimentResult(SentimentResponseDocumentsItem documentSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getConfidenceScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { final SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getConfidenceScores(); final SentenceSentimentValue sentenceSentimentValue = sentenceSentiment.getSentiment(); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(sentenceSentiment.getText(), TextSentiment.fromString(sentenceSentimentValue == null ? null : sentenceSentimentValue.toString()), new SentimentConfidenceScores(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive())); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, toSentenceOpinionList(sentenceSentiment, documentSentimentList)); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, sentenceSentiment.getOffset()); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, sentenceSentiment.getLength()); return sentenceSentiment1; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSentiment.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final DocumentSentimentValue documentSentimentValue = documentSentiment.getSentiment(); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? 
null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( TextSentiment.fromString(documentSentimentValue == null ? null : documentSentimentValue.toString()), new SentimentConfidenceScores( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), new IterableStream<>(sentenceSentiments), new IterableStream<>(warnings) )); } /* * Transform SentenceSentiment's opinion mining to output that user can use. */ private static IterableStream<SentenceOpinion> toSentenceOpinionList( com.azure.ai.textanalytics.implementation.models.SentenceSentiment sentenceSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final List<SentenceTarget> sentenceTargets = sentenceSentiment.getTargets(); if (sentenceTargets == null) { return null; } final List<SentenceOpinion> sentenceOpinions = new ArrayList<>(); sentenceTargets.forEach(sentenceTarget -> { final List<AssessmentSentiment> assessmentSentiments = new ArrayList<>(); sentenceTarget.getRelations().forEach(targetRelation -> { final TargetRelationType targetRelationType = targetRelation.getRelationType(); final String opinionPointer = targetRelation.getRef(); if (TargetRelationType.ASSESSMENT == targetRelationType) { assessmentSentiments.add(toAssessmentSentiment( findSentimentAssessment(opinionPointer, documentSentimentList))); } }); final TargetSentiment targetSentiment = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment, sentenceTarget.getText()); TargetSentimentPropertiesHelper.setSentiment(targetSentiment, TextSentiment.fromString(sentenceTarget.getSentiment().toString())); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment, toSentimentConfidenceScores(sentenceTarget.getConfidenceScores())); TargetSentimentPropertiesHelper.setOffset(targetSentiment, sentenceTarget.getOffset()); TargetSentimentPropertiesHelper.setLength(targetSentiment, 
sentenceTarget.getLength()); final SentenceOpinion sentenceOpinion = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion, targetSentiment); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion, new IterableStream<>(assessmentSentiments)); sentenceOpinions.add(sentenceOpinion); }); return new IterableStream<>(sentenceOpinions); } /* * Transform type TargetConfidenceScoreLabel to SentimentConfidenceScores. */ private static SentimentConfidenceScores toSentimentConfidenceScores( TargetConfidenceScoreLabel targetConfidenceScoreLabel) { return new SentimentConfidenceScores(targetConfidenceScoreLabel.getNegative(), NEUTRAL_SCORE_ZERO, targetConfidenceScoreLabel.getPositive()); } /* * Transform type SentenceOpinion to OpinionSentiment. */ private static AssessmentSentiment toAssessmentSentiment(SentenceAssessment sentenceAssessment) { final AssessmentSentiment assessmentSentiment = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment, sentenceAssessment.getText()); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment, TextSentiment.fromString(sentenceAssessment.getSentiment().toString())); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment, toSentimentConfidenceScores(sentenceAssessment.getConfidenceScores())); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment, sentenceAssessment.isNegated()); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment, sentenceAssessment.getOffset()); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment, sentenceAssessment.getLength()); return assessmentSentiment; } private static ExtractSummaryResult toExtractSummaryResult( ExtractiveSummarizationResultDocumentsItem documentSummary) { final List<ExtractedSummarySentence> sentences = documentSummary.getSentences(); final List<SummarySentence> summarySentences = sentences.stream().map(sentence -> { final SummarySentence summarySentence = 
new SummarySentence(); SummarySentencePropertiesHelper.setText(summarySentence, sentence.getText()); SummarySentencePropertiesHelper.setRankScore(summarySentence, sentence.getRankScore()); SummarySentencePropertiesHelper.setLength(summarySentence, sentence.getLength()); SummarySentencePropertiesHelper.setOffset(summarySentence, sentence.getOffset()); return summarySentence; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSummary.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final SummarySentenceCollection summarySentenceCollection = new SummarySentenceCollection( new IterableStream<>(summarySentences), new IterableStream<>(warnings) ); final ExtractSummaryResult extractSummaryResult = new ExtractSummaryResult(documentSummary.getId(), documentSummary.getStatistics() == null ? null : toTextDocumentStatistics(documentSummary.getStatistics()), null ); ExtractSummaryResultPropertiesHelper.setSentences(extractSummaryResult, summarySentenceCollection); return extractSummaryResult; } /** * Helper method to convert {@link CustomEntitiesResult} to {@link RecognizeCustomEntitiesResultCollection}. * * @param customEntitiesResult The {@link CustomEntitiesResult}. * * @return A {@link RecognizeCustomEntitiesResultCollection}. 
*/ public static RecognizeCustomEntitiesResultCollection toRecognizeCustomEntitiesResultCollection( CustomEntitiesResult customEntitiesResult) { final List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); final List<CustomEntitiesResultDocumentsItem> customEntitiesResultDocuments = customEntitiesResult.getDocuments(); for (CustomEntitiesResultDocumentsItem documentSummary : customEntitiesResultDocuments) { recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentSummary)); } for (DocumentError documentError : customEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } final RecognizeCustomEntitiesResultCollection resultCollection = new RecognizeCustomEntitiesResultCollection(recognizeEntitiesResults); RecognizeCustomEntitiesResultCollectionPropertiesHelper.setProjectName(resultCollection, customEntitiesResult.getProjectName()); RecognizeCustomEntitiesResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customEntitiesResult.getDeploymentName()); if (customEntitiesResult.getStatistics() != null) { RecognizeCustomEntitiesResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customEntitiesResult.getStatistics())); } return resultCollection; } /** * Helper method to convert {@link CustomSingleClassificationResult} to * {@link SingleCategoryClassifyResultCollection}. * * @param customSingleClassificationResult The {@link CustomSingleClassificationResult}. * * @return A {@link SingleCategoryClassifyResultCollection}. 
*/ public static SingleCategoryClassifyResultCollection toSingleCategoryClassifyResultCollection( CustomSingleLabelClassificationResult customSingleClassificationResult) { final List<SingleCategoryClassifyResult> singleCategoryClassifyResults = new ArrayList<>(); final List<CustomSingleLabelClassificationResultDocumentsItem> singleClassificationDocuments = customSingleClassificationResult.getDocuments(); for (CustomSingleLabelClassificationResultDocumentsItem documentSummary : singleClassificationDocuments) { singleCategoryClassifyResults.add(toSingleCategoryClassifyResult(documentSummary)); } for (DocumentError documentError : customSingleClassificationResult.getErrors()) { singleCategoryClassifyResults.add(new SingleCategoryClassifyResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } final SingleCategoryClassifyResultCollection resultCollection = new SingleCategoryClassifyResultCollection(singleCategoryClassifyResults); SingleCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection, customSingleClassificationResult.getProjectName()); SingleCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customSingleClassificationResult.getDeploymentName()); if (customSingleClassificationResult.getStatistics() != null) { SingleCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customSingleClassificationResult.getStatistics())); } return resultCollection; } private static SingleCategoryClassifyResult toSingleCategoryClassifyResult( CustomSingleLabelClassificationResultDocumentsItem singleClassificationDocument) { final ClassificationResult classificationResult = singleClassificationDocument.getClassProperty(); final List<TextAnalyticsWarning> warnings = singleClassificationDocument.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final SingleCategoryClassifyResult 
singleCategoryClassifyResult = new SingleCategoryClassifyResult( singleClassificationDocument.getId(), singleClassificationDocument.getStatistics() == null ? null : toTextDocumentStatistics(singleClassificationDocument.getStatistics()), null); SingleCategoryClassifyResultPropertiesHelper.setClassification(singleCategoryClassifyResult, toDocumentClassification(classificationResult)); SingleCategoryClassifyResultPropertiesHelper.setWarnings(singleCategoryClassifyResult, new IterableStream<>(warnings)); return singleCategoryClassifyResult; } private static ClassificationCategory toDocumentClassification(ClassificationResult classificationResult) { final ClassificationCategory classificationCategory = new ClassificationCategory(); ClassificationCategoryPropertiesHelper.setCategory(classificationCategory, classificationResult.getCategory()); ClassificationCategoryPropertiesHelper.setConfidenceScore(classificationCategory, classificationResult.getConfidenceScore()); return classificationCategory; } /** * Helper method to convert {@link CustomMultiClassificationResult} to * {@link MultiCategoryClassifyResultCollection}. * * @param customMultiClassificationResult The {@link CustomMultiClassificationResult}. * * @return A {@link SingleCategoryClassifyResultCollection}. 
*/ public static MultiCategoryClassifyResultCollection toMultiCategoryClassifyResultCollection( CustomMultiLabelClassificationResult customMultiClassificationResult) { final List<MultiCategoryClassifyResult> multiCategoryClassifyResults = new ArrayList<>(); final List<CustomMultiLabelClassificationResultDocumentsItem> multiClassificationDocuments = customMultiClassificationResult.getDocuments(); for (CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument : multiClassificationDocuments) { multiCategoryClassifyResults.add(toMultiCategoryClassifyResult(multiClassificationDocument)); } for (DocumentError documentError : customMultiClassificationResult.getErrors()) { multiCategoryClassifyResults.add(new MultiCategoryClassifyResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } final MultiCategoryClassifyResultCollection resultCollection = new MultiCategoryClassifyResultCollection(multiCategoryClassifyResults); MultiCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection, customMultiClassificationResult.getProjectName()); MultiCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customMultiClassificationResult.getDeploymentName()); if (customMultiClassificationResult.getStatistics() != null) { MultiCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customMultiClassificationResult.getStatistics())); } return resultCollection; } private static MultiCategoryClassifyResult toMultiCategoryClassifyResult( CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument) { final List<ClassificationCategory> classificationCategories = multiClassificationDocument .getClassProperty() .stream() .map(classificationResult -> toDocumentClassification(classificationResult)) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = multiClassificationDocument.getWarnings().stream().map( warning -> 
toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final MultiCategoryClassifyResult classifySingleCategoryResult = new MultiCategoryClassifyResult( multiClassificationDocument.getId(), multiClassificationDocument.getStatistics() == null ? null : toTextDocumentStatistics(multiClassificationDocument.getStatistics()), null); final ClassificationCategoryCollection classifications = new ClassificationCategoryCollection( new IterableStream<>(classificationCategories)); ClassificationCategoryCollectionPropertiesHelper.setWarnings(classifications, new IterableStream<>(warnings)); MultiCategoryClassifyResultPropertiesHelper.setClassifications(classifySingleCategoryResult, classifications); return classifySingleCategoryResult; } /* * Parses the reference pointer to an index array that contains document, sentence, and opinion indexes. */ public static int[] parseRefPointerToIndexArray(String assessmentPointer) { final Matcher matcher = PATTERN.matcher(assessmentPointer); final boolean isMatched = matcher.find(); final int[] result = new int[3]; if (isMatched) { result[0] = Integer.parseInt(matcher.group(1)); result[1] = Integer.parseInt(matcher.group(2)); result[2] = Integer.parseInt(matcher.group(3)); } else { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("'%s' is not a valid assessment pointer.", assessmentPointer))); } return result; } /* * Find the specific sentence assessment in the document sentiment list by given the assessment reference pointer. 
*/ public static SentenceAssessment findSentimentAssessment(String assessmentPointer, List<SentimentResponseDocumentsItem> documentSentiments) { final int[] assessmentIndexes = parseRefPointerToIndexArray(assessmentPointer); final int documentIndex = assessmentIndexes[0]; final int sentenceIndex = assessmentIndexes[1]; final int assessmentIndex = assessmentIndexes[2]; if (documentIndex >= documentSentiments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid document index '%s' in '%s'.", documentIndex, assessmentPointer))); } final SentimentResponseDocumentsItem documentsentiment = documentSentiments.get(documentIndex); final List<com.azure.ai.textanalytics.implementation.models.SentenceSentiment> sentenceSentiments = documentsentiment.getSentences(); if (sentenceIndex >= sentenceSentiments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid sentence index '%s' in '%s'.", sentenceIndex, assessmentPointer))); } final List<SentenceAssessment> assessments = sentenceSentiments.get(sentenceIndex).getAssessments(); if (assessmentIndex >= assessments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid assessment index '%s' in '%s'.", assessmentIndex, assessmentPointer))); } return assessments.get(assessmentIndex); } }
If the cluster uses FQDNs but the host specified in the external OLAP table is an IP address, this check will not work. We keep this simple logic anyway, because creating an OLAP external table that maps back to the local cluster is a rare case, and specifying an IP address while the cluster uses FQDNs is rarer still.
/**
 * Checks whether a frontend node with the given host and RPC port is already
 * registered in this cluster.
 *
 * NOTE(review): the host comparison is plain string equality, so if the
 * cluster is configured with FQDNs and {@code host} is an IP address (or
 * vice versa) this check will not match — a known, accepted limitation.
 *
 * @param host    host of the frontend to look for (compared verbatim)
 * @param rpcPort RPC port of the frontend to look for
 * @return true if some registered frontend has exactly this host and RPC port
 */
public boolean checkFeExistByRPCPort(String host, int rpcPort) {
    try {
        // Block until the global-state lock is acquired (mustLock == true);
        // acquiring inside the try keeps the finally/unlock pairing intact.
        tryLock(true);
        return frontends
                .values()
                .stream()
                .anyMatch(fe -> fe.getHost().equals(host) && fe.getRpcPort() == rpcPort);
    } finally {
        unlock();
    }
}
.anyMatch(fe -> fe.getHost().equals(host) && fe.getRpcPort() == rpcPort);
/**
 * Reports whether any registered frontend matches the given host and RPC port.
 *
 * The host is compared with plain string equality, so an FQDN-registered
 * frontend will not match an IP-specified host (accepted limitation).
 *
 * @param host    host string to match verbatim against registered frontends
 * @param rpcPort RPC port to match
 * @return true if a frontend with this exact host and RPC port exists
 */
public boolean checkFeExistByRPCPort(String host, int rpcPort) {
    try {
        // Acquire the global-state lock before scanning; done inside the try
        // so the finally block always releases it.
        tryLock(true);
        return frontends.values()
                .stream()
                .filter(frontend -> frontend.getRpcPort() == rpcPort)
                .anyMatch(frontend -> frontend.getHost().equals(host));
    } finally {
        unlock();
    }
}
class NodeMgr { private static final Logger LOG = LogManager.getLogger(NodeMgr.class); private static final int HTTP_TIMEOUT_SECOND = 5; /** * LeaderInfo */ @SerializedName(value = "r") private int leaderRpcPort; @SerializedName(value = "h") private int leaderHttpPort; @SerializedName(value = "ip") private String leaderIp; /** * Frontends * <p> * frontends : name -> Frontend * removedFrontends: removed frontends' name. used for checking if name is duplicated in bdbje */ @SerializedName(value = "f") private ConcurrentHashMap<String, Frontend> frontends = new ConcurrentHashMap<>(); @SerializedName(value = "rf") private ConcurrentLinkedQueue<String> removedFrontends = new ConcurrentLinkedQueue<>(); /** * Backends and Compute Node */ @SerializedName(value = "s") private SystemInfoService systemInfo; /** * Broker */ @SerializedName(value = "b") private BrokerMgr brokerMgr; private boolean isFirstTimeStartUp = false; private boolean isElectable; private String nodeName; private FrontendNodeType role; private int clusterId; private String token; private String runMode; private String imageDir; private final List<Pair<String, Integer>> helperNodes = Lists.newArrayList(); private Pair<String, Integer> selfNode = null; private final Map<Integer, SystemInfoService> systemInfoMap = new ConcurrentHashMap<>(); public NodeMgr() { this.role = FrontendNodeType.UNKNOWN; this.leaderRpcPort = 0; this.leaderHttpPort = 0; this.leaderIp = ""; this.systemInfo = new SystemInfoService(); this.brokerMgr = new BrokerMgr(); } public void initialize(String[] args) throws Exception { getCheckedSelfHostPort(); getHelperNodes(args); } private boolean tryLock(boolean mustLock) { return GlobalStateMgr.getCurrentState().tryLock(mustLock); } private void unlock() { GlobalStateMgr.getCurrentState().unlock(); } public List<Frontend> getFrontends(FrontendNodeType nodeType) { if (nodeType == null) { return Lists.newArrayList(frontends.values()); } List<Frontend> result = Lists.newArrayList(); for 
(Frontend frontend : frontends.values()) { if (frontend.getRole() == nodeType) { result.add(frontend); } } return result; } public List<String> getRemovedFrontendNames() { return Lists.newArrayList(removedFrontends); } public SystemInfoService getOrCreateSystemInfo(Integer clusterId) { SystemInfoService systemInfoService = systemInfoMap.get(clusterId); if (systemInfoService == null) { systemInfoService = new SystemInfoService(); systemInfoMap.put(clusterId, systemInfoService); } return systemInfoService; } public SystemInfoService getClusterInfo() { return this.systemInfo; } public BrokerMgr getBrokerMgr() { return brokerMgr; } public void getClusterIdAndRoleOnStartup() throws IOException { File roleFile = new File(this.imageDir, Storage.ROLE_FILE); File versionFile = new File(this.imageDir, Storage.VERSION_FILE); boolean isVersionFileChanged = false; Storage storage = new Storage(this.imageDir); if (isMyself() || (roleFile.exists() && versionFile.exists())) { if (!isMyself()) { LOG.info("find ROLE and VERSION file in local, ignore helper nodes: {}", helperNodes); } if ((roleFile.exists() && !versionFile.exists()) || (!roleFile.exists() && versionFile.exists())) { LOG.error("role file and version file must both exist or both not exist. " + "please specific one helper node to recover. will exit."); System.exit(-1); } if (!roleFile.exists()) { role = FrontendNodeType.FOLLOWER; nodeName = GlobalStateMgr.genFeNodeName(selfNode.first, selfNode.second, false /* new style */); storage.writeFrontendRoleAndNodeName(role, nodeName); LOG.info("very first time to start this node. role: {}, node name: {}", role.name(), nodeName); } else { role = storage.getRole(); nodeName = storage.getNodeName(); if (Strings.isNullOrEmpty(nodeName)) { nodeName = GlobalStateMgr.genFeNodeName(selfNode.first, selfNode.second, true/* old style */); storage.writeFrontendRoleAndNodeName(role, nodeName); LOG.info("forward compatibility. 
role: {}, node name: {}", role.name(), nodeName); } } Preconditions.checkNotNull(role); Preconditions.checkNotNull(nodeName); if (!versionFile.exists()) { clusterId = Config.cluster_id == -1 ? Storage.newClusterID() : Config.cluster_id; token = Strings.isNullOrEmpty(Config.auth_token) ? Storage.newToken() : Config.auth_token; storage = new Storage(clusterId, token, this.imageDir); isVersionFileChanged = true; isFirstTimeStartUp = true; Frontend self = new Frontend(role, nodeName, selfNode.first, selfNode.second); frontends.put(nodeName, self); } else { clusterId = storage.getClusterID(); if (storage.getToken() == null) { token = Strings.isNullOrEmpty(Config.auth_token) ? Storage.newToken() : Config.auth_token; LOG.info("new token={}", token); storage.setToken(token); isVersionFileChanged = true; } else { token = storage.getToken(); } runMode = storage.getRunMode(); isFirstTimeStartUp = false; } } else { while (true) { if (!getFeNodeTypeAndNameFromHelpers()) { LOG.warn("current node is not added to the group. please add it first. 
" + "sleep 5 seconds and retry, current helper nodes: {}", helperNodes); try { Thread.sleep(5000); continue; } catch (InterruptedException e) { LOG.warn(e); System.exit(-1); } } break; } Preconditions.checkState(helperNodes.size() == 1); Preconditions.checkNotNull(role); Preconditions.checkNotNull(nodeName); Pair<String, Integer> rightHelperNode = helperNodes.get(0); storage = new Storage(this.imageDir); if (roleFile.exists() && (role != storage.getRole() || !nodeName.equals(storage.getNodeName())) || !roleFile.exists()) { storage.writeFrontendRoleAndNodeName(role, nodeName); } if (!versionFile.exists()) { if (!getVersionFileFromHelper(rightHelperNode)) { System.exit(-1); } storage = new Storage(this.imageDir); clusterId = storage.getClusterID(); token = storage.getToken(); runMode = storage.getRunMode(); if (Strings.isNullOrEmpty(token)) { token = Config.auth_token; isVersionFileChanged = true; } if (Strings.isNullOrEmpty(runMode)) { runMode = RunMode.SHARED_NOTHING.getName(); storage.setRunMode(runMode); isVersionFileChanged = true; } } else { clusterId = storage.getClusterID(); token = storage.getToken(); runMode = storage.getRunMode(); if (Strings.isNullOrEmpty(runMode)) { runMode = RunMode.SHARED_NOTHING.getName(); storage.setRunMode(runMode); isVersionFileChanged = true; } try { URL idURL = new URL("http: HttpURLConnection conn = null; conn = (HttpURLConnection) idURL.openConnection(); conn.setConnectTimeout(2 * 1000); conn.setReadTimeout(2 * 1000); String clusterIdString = conn.getHeaderField(MetaBaseAction.CLUSTER_ID); int remoteClusterId = Integer.parseInt(clusterIdString); if (remoteClusterId != clusterId) { LOG.error("cluster id is not equal with helper node {}. will exit.", rightHelperNode.first); System.exit(-1); } String remoteToken = conn.getHeaderField(MetaBaseAction.TOKEN); if (token == null && remoteToken != null) { LOG.info("get token from helper node. 
token={}.", remoteToken); token = remoteToken; isVersionFileChanged = true; storage.reload(); } if (Config.enable_token_check) { Preconditions.checkNotNull(token); Preconditions.checkNotNull(remoteToken); if (!token.equals(remoteToken)) { LOG.error("token is not equal with helper node {}. will exit.", rightHelperNode.first); System.exit(-1); } } String remoteRunMode = conn.getHeaderField(MetaBaseAction.RUN_MODE); if (Strings.isNullOrEmpty(remoteRunMode)) { remoteRunMode = RunMode.SHARED_NOTHING.getName(); } if (!runMode.equalsIgnoreCase(remoteRunMode)) { LOG.error("Unmatched run mode with helper node {}: {} vs {}, will exit .", rightHelperNode.first, runMode, remoteRunMode); System.exit(-1); } } catch (Exception e) { LOG.warn("fail to check cluster_id and token with helper node.", e); System.exit(-1); } } getNewImageOnStartup(rightHelperNode, ""); if (RunMode.allowCreateLakeTable()) { String subDir = this.imageDir + StarMgrServer.IMAGE_SUBDIR; File dir = new File(subDir); if (!dir.exists()) { LOG.info("create image dir for {}.", dir.getAbsolutePath()); if (!dir.mkdir()) { LOG.error("create image dir for star mgr failed! exit now."); System.exit(-1); } } getNewImageOnStartup(rightHelperNode, StarMgrServer.IMAGE_SUBDIR); } } if (Config.cluster_id != -1 && clusterId != Config.cluster_id) { LOG.error("cluster id is not equal with config item cluster_id. will exit."); System.exit(-1); } if (Strings.isNullOrEmpty(runMode)) { if (isFirstTimeStartUp) { runMode = RunMode.name(); storage.setRunMode(runMode); isVersionFileChanged = true; } else if (RunMode.allowCreateLakeTable()) { LOG.error("Upgrading from a cluster with version less than 3.0 to a cluster with run mode {} of " + "version 3.0 or above is disallowed. will exit", RunMode.name()); System.exit(-1); } } else if (!runMode.equalsIgnoreCase(RunMode.name())) { LOG.error("Unmatched run mode between config file and version file: {} vs {}. will exit! 
", RunMode.name(), runMode); System.exit(-1); } if (isVersionFileChanged) { storage.writeVersionFile(); } LOG.info("Current run_mode is {}", runMode); isElectable = role.equals(FrontendNodeType.FOLLOWER); systemInfoMap.put(clusterId, systemInfo); Preconditions.checkState(helperNodes.size() == 1); LOG.info("Got cluster id: {}, role: {}, node name: {} and run_mode: {}", clusterId, role.name(), nodeName, runMode); } private boolean getFeNodeTypeAndNameFromHelpers() { Pair<String, Integer> rightHelperNode = null; for (Pair<String, Integer> helperNode : helperNodes) { try { URL url = new URL("http: + "/role?host=" + selfNode.first + "&port=" + selfNode.second); HttpURLConnection conn = null; conn = (HttpURLConnection) url.openConnection(); if (conn.getResponseCode() != 200) { LOG.warn("failed to get fe node type from helper node: {}. response code: {}", helperNode, conn.getResponseCode()); continue; } String type = conn.getHeaderField("role"); if (type == null) { LOG.warn("failed to get fe node type from helper node: {}.", helperNode); continue; } role = FrontendNodeType.valueOf(type); nodeName = conn.getHeaderField("name"); if (role == FrontendNodeType.UNKNOWN) { LOG.warn("frontend {} is not added to cluster yet. role UNKNOWN", selfNode); return false; } if (Strings.isNullOrEmpty(nodeName)) { nodeName = GlobalStateMgr.genFeNodeName(selfNode.first, selfNode.second, true /* old style */); } } catch (Exception e) { LOG.warn("failed to get fe node type from helper node: {}.", helperNode, e); continue; } LOG.info("get fe node type {}, name {} from {}:{}", role, nodeName, helperNode.first, Config.http_port); rightHelperNode = helperNode; break; } if (rightHelperNode == null) { return false; } helperNodes.clear(); helperNodes.add(rightHelperNode); return true; } private boolean isMyself() { Preconditions.checkNotNull(selfNode); Preconditions.checkNotNull(helperNodes); LOG.debug("self: {}. 
helpers: {}", selfNode, helperNodes); boolean containSelf = false; for (Pair<String, Integer> helperNode : helperNodes) { if (selfNode.equals(helperNode)) { containSelf = true; } } if (containSelf) { helperNodes.clear(); helperNodes.add(selfNode); } return containSelf; } public long loadFrontends(DataInputStream dis, long checksum) throws IOException { int size = dis.readInt(); long newChecksum = checksum ^ size; for (int i = 0; i < size; i++) { Frontend fe = Frontend.read(dis); replayAddFrontend(fe); } size = dis.readInt(); newChecksum ^= size; for (int i = 0; i < size; i++) { removedFrontends.add(Text.readString(dis)); } LOG.info("finished replay frontends from image"); return newChecksum; } public long saveFrontends(DataOutputStream dos, long checksum) throws IOException { int size = frontends.size(); checksum ^= size; dos.writeInt(size); for (Frontend fe : frontends.values()) { fe.write(dos); } size = removedFrontends.size(); checksum ^= size; dos.writeInt(size); for (String feName : removedFrontends) { Text.writeString(dos, feName); } return checksum; } public long loadBackends(DataInputStream dis, long checksum) throws IOException { return systemInfo.loadBackends(dis, checksum); } public long saveBackends(DataOutputStream dos, long checksum) throws IOException { return systemInfo.saveBackends(dos, checksum); } public long loadComputeNodes(DataInputStream dis, long checksum) throws IOException { return systemInfo.loadComputeNodes(dis, checksum); } public long saveComputeNodes(DataOutputStream dos, long checksum) throws IOException { return systemInfo.saveComputeNodes(dos, checksum); } private StorageInfo getStorageInfo(URL url) throws IOException { ObjectMapper mapper = new ObjectMapper(); HttpURLConnection connection = null; try { connection = (HttpURLConnection) url.openConnection(); connection.setConnectTimeout(HTTP_TIMEOUT_SECOND * 1000); connection.setReadTimeout(HTTP_TIMEOUT_SECOND * 1000); return mapper.readValue(connection.getInputStream(), 
StorageInfo.class); } finally { if (connection != null) { connection.disconnect(); } } } private void getHelperNodes(String[] args) throws AnalysisException { String helpers = null; for (int i = 0; i < args.length; i++) { if (args[i].equalsIgnoreCase("-helper")) { if (i + 1 >= args.length) { System.out.println("-helper need parameter host:port,host:port"); System.exit(-1); } helpers = args[i + 1]; if (!helpers.contains(":")) { System.out.print("helper's format seems was wrong [" + helpers + "]"); System.out.println(", eg. host:port,host:port"); System.exit(-1); } break; } } if (helpers != null) { String[] splittedHelpers = helpers.split(","); for (String helper : splittedHelpers) { Pair<String, Integer> helperHostPort = SystemInfoService.validateHostAndPort(helper); if (helperHostPort.equals(selfNode)) { /* * If user specified the helper node to this FE itself, * we will stop the starting FE process and report an error. * First, it is meaningless to point the helper to itself. * Secondly, when some users add FE for the first time, they will mistakenly * point the helper that should have pointed to the Master to themselves. * In this case, some errors have caused users to be troubled. * So here directly exit the program and inform the user to avoid unnecessary trouble. */ throw new AnalysisException( "Do not specify the helper node to FE itself. " + "Please specify it to the existing running Leader or Follower FE"); } helperNodes.add(helperHostPort); } } else { helperNodes.add(Pair.create(selfNode.first, Config.edit_log_port)); } LOG.info("get helper nodes: {}", helperNodes); } private void getCheckedSelfHostPort() { selfNode = new Pair<>(FrontendOptions.getLocalHostAddress(), Config.edit_log_port); /* * For the first time, if the master start up failed, it will also fail to restart. * Check port using before create meta files to avoid this problem. */ try { if (NetUtils.isPortUsing(selfNode.first, selfNode.second)) { LOG.error("edit_log_port {} is already in use. 
will exit.", selfNode.second); System.exit(-1); } } catch (UnknownHostException e) { LOG.error(e); System.exit(-1); } LOG.debug("get self node: {}", selfNode); } public Pair<String, Integer> getHelperNode() { Preconditions.checkState(helperNodes.size() >= 1); return this.helperNodes.get(0); } public List<Pair<String, Integer>> getHelperNodes() { return Lists.newArrayList(helperNodes); } /* * If the current node is not in the frontend list, then exit. This may * happen when this node is removed from frontend list, and the drop * frontend log is deleted because of checkpoint. */ public void checkCurrentNodeExist() { if (Config.metadata_failure_recovery.equals("true")) { return; } Frontend fe = checkFeExist(selfNode.first, selfNode.second); if (fe == null) { LOG.error("current node is not added to the cluster, will exit"); System.exit(-1); } else if (fe.getRole() != role) { LOG.error("current node role is {} not match with frontend recorded role {}. will exit", role, fe.getRole()); System.exit(-1); } } private boolean getVersionFileFromHelper(Pair<String, Integer> helperNode) throws IOException { String url = "http: LOG.info("Downloading version file from {}", url); try { File dir = new File(this.imageDir); MetaHelper.getRemoteFile(url, HTTP_TIMEOUT_SECOND * 1000, MetaHelper.getOutputStream(Storage.VERSION_FILE, dir)); MetaHelper.complete(Storage.VERSION_FILE, dir); return true; } catch (Exception e) { LOG.warn("Fail to download version file from {}:{}", url, e.getMessage()); } return false; } /** * When a new node joins in the cluster for the first time, it will download image from the helper at the very beginning * Exception are free to raise on initialized phase */ private void getNewImageOnStartup(Pair<String, Integer> helperNode, String subDir) throws IOException { long localImageVersion = 0; String dirStr = this.imageDir + subDir; Storage storage = new Storage(dirStr); localImageVersion = storage.getImageJournalId(); URL infoUrl = new URL("http: StorageInfo info 
= getStorageInfo(infoUrl); long version = info.getImageJournalId(); if (version > localImageVersion) { String url = "http: + "/image?version=" + version + "&subdir=" + subDir; LOG.info("start to download image.{} from {}", version, url); String filename = Storage.IMAGE + "." + version; File dir = new File(dirStr); MetaHelper.getRemoteFile(url, HTTP_TIMEOUT_SECOND * 1000, MetaHelper.getOutputStream(filename, dir)); MetaHelper.complete(filename, dir); } else { LOG.info("skip download image for {}, current version {} >= version {} from {}", dirStr, localImageVersion, version, helperNode); } } public void addFrontend(FrontendNodeType role, String host, int editLogPort) throws DdlException { if (!tryLock(false)) { throw new DdlException("Failed to acquire globalStateMgr lock. Try again"); } try { try { if (checkFeExistByIpOrFqdn(host)) { throw new DdlException("FE with the same host: " + host + " already exists"); } } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", host, e); throw new DdlException("unknown fqdn host: " + host); } String nodeName = GlobalStateMgr.genFeNodeName(host, editLogPort, false /* new name style */); if (removedFrontends.contains(nodeName)) { throw new DdlException("frontend name already exists " + nodeName + ". 
Try again"); } Frontend fe = new Frontend(role, nodeName, host, editLogPort); frontends.put(nodeName, fe); if (role == FrontendNodeType.FOLLOWER) { helperNodes.add(Pair.create(host, editLogPort)); } if (GlobalStateMgr.getCurrentState().getHaProtocol() instanceof BDBHA) { BDBHA bdbha = (BDBHA) GlobalStateMgr.getCurrentState().getHaProtocol(); if (role == FrontendNodeType.FOLLOWER) { bdbha.addUnstableNode(host, getFollowerCnt()); } bdbha.removeNodeIfExist(host, editLogPort, nodeName); } GlobalStateMgr.getCurrentState().getEditLog().logAddFrontend(fe); } finally { unlock(); } } public void modifyFrontendHost(ModifyFrontendAddressClause modifyFrontendAddressClause) throws DdlException { String toBeModifyHost = modifyFrontendAddressClause.getSrcHost(); String fqdn = modifyFrontendAddressClause.getDestHost(); if (toBeModifyHost.equals(selfNode.first) && role == FrontendNodeType.LEADER) { throw new DdlException("can not modify current master node."); } if (!tryLock(false)) { throw new DdlException("Failed to acquire globalStateMgr lock. 
Try again"); } try { Frontend preUpdateFe = getFeByHost(toBeModifyHost); if (preUpdateFe == null) { throw new DdlException(String.format("frontend [%s] not found", toBeModifyHost)); } Frontend existFe = null; for (Frontend fe : frontends.values()) { if (fe.getHost().equals(fqdn)) { existFe = fe; } } if (null != existFe) { throw new DdlException("frontend with host [" + fqdn + "] already exists "); } BDBHA bdbha = (BDBHA) GlobalStateMgr.getCurrentState().getHaProtocol(); bdbha.updateFrontendHostAndPort(preUpdateFe.getNodeName(), fqdn, preUpdateFe.getEditLogPort()); preUpdateFe.updateHostAndEditLogPort(fqdn, preUpdateFe.getEditLogPort()); frontends.put(preUpdateFe.getNodeName(), preUpdateFe); GlobalStateMgr.getCurrentState().getEditLog().logUpdateFrontend(preUpdateFe); LOG.info("send update fe editlog success, fe info is [{}]", preUpdateFe.toString()); } finally { unlock(); } } public void dropFrontend(FrontendNodeType role, String host, int port) throws DdlException { if (host.equals(selfNode.first) && port == selfNode.second && GlobalStateMgr.getCurrentState().getFeType() == FrontendNodeType.LEADER) { throw new DdlException("can not drop current master node."); } if (!tryLock(false)) { throw new DdlException("Failed to acquire globalStateMgr lock. 
Try again"); } try { Frontend fe = unprotectCheckFeExist(host, port); if (fe == null) { throw new DdlException("frontend does not exist[" + host + ":" + port + "]"); } if (fe.getRole() != role) { throw new DdlException(role.toString() + " does not exist[" + host + ":" + port + "]"); } frontends.remove(fe.getNodeName()); removedFrontends.add(fe.getNodeName()); if (fe.getRole() == FrontendNodeType.FOLLOWER) { GlobalStateMgr.getCurrentState().getHaProtocol().removeElectableNode(fe.getNodeName()); helperNodes.remove(Pair.create(host, port)); BDBHA ha = (BDBHA) GlobalStateMgr.getCurrentState().getHaProtocol(); ha.removeUnstableNode(host, getFollowerCnt()); } GlobalStateMgr.getCurrentState().getEditLog().logRemoveFrontend(fe); } finally { unlock(); } } public void replayAddFrontend(Frontend fe) { tryLock(true); try { Frontend existFe = unprotectCheckFeExist(fe.getHost(), fe.getEditLogPort()); if (existFe != null) { LOG.warn("fe {} already exist.", existFe); if (existFe.getRole() != fe.getRole()) { /* * This may happen if: * 1. first, add a FE as OBSERVER. * 2. This OBSERVER is restarted with ROLE and VERSION file being DELETED. * In this case, this OBSERVER will be started as a FOLLOWER, and add itself to the frontends. * 3. this "FOLLOWER" begin to load image or replay journal, * then find the origin OBSERVER in image or journal. * This will cause UNDEFINED behavior, so it is better to exit and fix it manually. 
*/ System.err.println("Try to add an already exist FE with different role" + fe.getRole()); System.exit(-1); } return; } frontends.put(fe.getNodeName(), fe); if (fe.getRole() == FrontendNodeType.FOLLOWER) { helperNodes.add(Pair.create(fe.getHost(), fe.getEditLogPort())); } } finally { unlock(); } } public void replayUpdateFrontend(Frontend frontend) { tryLock(true); try { Frontend fe = frontends.get(frontend.getNodeName()); if (fe == null) { LOG.error("try to update frontend, but " + frontend.toString() + " does not exist."); return; } fe.updateHostAndEditLogPort(frontend.getHost(), frontend.getEditLogPort()); frontends.put(fe.getNodeName(), fe); LOG.info("update fe successfully, fe info is [{}]", frontend.toString()); } finally { unlock(); } } public void replayDropFrontend(Frontend frontend) { tryLock(true); try { Frontend removedFe = frontends.remove(frontend.getNodeName()); if (removedFe == null) { LOG.error(frontend.toString() + " does not exist."); return; } if (removedFe.getRole() == FrontendNodeType.FOLLOWER) { helperNodes.remove(Pair.create(removedFe.getHost(), removedFe.getEditLogPort())); } removedFrontends.add(removedFe.getNodeName()); } finally { unlock(); } } public Frontend checkFeExist(String host, int port) { tryLock(true); try { return unprotectCheckFeExist(host, port); } finally { unlock(); } } public Frontend unprotectCheckFeExist(String host, int port) { for (Frontend fe : frontends.values()) { if (fe.getHost().equals(host) && fe.getEditLogPort() == port) { return fe; } } return null; } protected boolean checkFeExistByIpOrFqdn(String ipOrFqdn) throws UnknownHostException { Pair<String, String> targetIpAndFqdn = NetUtils.getIpAndFqdnByHost(ipOrFqdn); for (Frontend fe : frontends.values()) { Pair<String, String> curIpAndFqdn; try { curIpAndFqdn = NetUtils.getIpAndFqdnByHost(fe.getHost()); } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", fe.getHost(), e); if (targetIpAndFqdn.second.equals(fe.getHost()) && 
!Strings.isNullOrEmpty(targetIpAndFqdn.second)) { return true; } continue; } if (targetIpAndFqdn.first.equals(curIpAndFqdn.first)) { return true; } if (targetIpAndFqdn.second.equals(curIpAndFqdn.second) && !Strings.isNullOrEmpty(targetIpAndFqdn.second)) { return true; } } return false; } public Frontend getFeByHost(String ipOrFqdn) { Pair<String, String> targetPair; try { targetPair = NetUtils.getIpAndFqdnByHost(ipOrFqdn); } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", e.getMessage()); return null; } for (Frontend fe : frontends.values()) { Pair<String, String> curPair; try { curPair = NetUtils.getIpAndFqdnByHost(fe.getHost()); } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", e.getMessage()); continue; } if (targetPair.first.equals(curPair.first)) { return fe; } if (targetPair.second.equals(curPair.second) && !curPair.second.equals("")) { return fe; } } return null; } public Frontend getFeByName(String name) { for (Frontend fe : frontends.values()) { if (fe.getNodeName().equals(name)) { return fe; } } return null; } public int getFollowerCnt() { int cnt = 0; for (Frontend fe : frontends.values()) { if (fe.getRole() == FrontendNodeType.FOLLOWER) { cnt++; } } return cnt; } public int getClusterId() { return this.clusterId; } public void setClusterId(int clusterId) { this.clusterId = clusterId; } public String getToken() { return token; } public FrontendNodeType getRole() { return this.role; } public Pair<String, Integer> getSelfNode() { return this.selfNode; } public String getNodeName() { return this.nodeName; } public Pair<String, Integer> getLeaderIpAndRpcPort() { if (GlobalStateMgr.getServingState().isReady()) { return new Pair<>(this.leaderIp, this.leaderRpcPort); } else { String leaderNodeName = GlobalStateMgr.getServingState().getHaProtocol().getLeaderNodeName(); Frontend frontend = frontends.get(leaderNodeName); return new Pair<>(frontend.getHost(), frontend.getRpcPort()); } } public 
Pair<String, Integer> getLeaderIpAndHttpPort() { if (GlobalStateMgr.getServingState().isReady()) { return new Pair<>(this.leaderIp, this.leaderHttpPort); } else { String leaderNodeName = GlobalStateMgr.getServingState().getHaProtocol().getLeaderNodeName(); Frontend frontend = frontends.get(leaderNodeName); return new Pair<>(frontend.getHost(), Config.http_port); } } public String getLeaderIp() { if (GlobalStateMgr.getServingState().isReady()) { return this.leaderIp; } else { String leaderNodeName = GlobalStateMgr.getServingState().getHaProtocol().getLeaderNodeName(); return frontends.get(leaderNodeName).getHost(); } } public void setLeader(LeaderInfo info) { this.leaderIp = info.getIp(); this.leaderHttpPort = info.getHttpPort(); this.leaderRpcPort = info.getRpcPort(); } public void updateResourceUsage(long backendId, TResourceUsage usage) { List<Frontend> allFrontends = getFrontends(null); for (Frontend fe : allFrontends) { if (fe.getHost().equals(getSelfNode().first)) { continue; } TUpdateResourceUsageRequest request = new TUpdateResourceUsageRequest(); request.setBackend_id(backendId); request.setResource_usage(usage); try { TUpdateResourceUsageResponse response = FrontendServiceProxy .call(new TNetworkAddress(fe.getHost(), fe.getRpcPort()), Config.thrift_rpc_timeout_ms, Config.thrift_rpc_retry_times, client -> client.updateResourceUsage(request)); if (response.getStatus().getStatus_code() != TStatusCode.OK) { LOG.warn("UpdateResourceUsage to remote fe: {} failed", fe.getHost()); } } catch (Exception e) { LOG.warn("UpdateResourceUsage to remote fe: {} failed", fe.getHost(), e); } } } public void setConfig(AdminSetConfigStmt stmt) throws DdlException { setFrontendConfig(stmt.getConfig().getMap()); List<Frontend> allFrontends = getFrontends(null); int timeout = ConnectContext.get().getSessionVariable().getQueryTimeoutS() * 1000 + Config.thrift_rpc_timeout_ms; StringBuilder errMsg = new StringBuilder(); for (Frontend fe : allFrontends) { if 
(fe.getHost().equals(getSelfNode().first)) { continue; } TSetConfigRequest request = new TSetConfigRequest(); request.setKeys(Lists.newArrayList(stmt.getConfig().getKey())); request.setValues(Lists.newArrayList(stmt.getConfig().getValue())); try { TSetConfigResponse response = FrontendServiceProxy .call(new TNetworkAddress(fe.getHost(), fe.getRpcPort()), timeout, Config.thrift_rpc_retry_times, client -> client.setConfig(request)); TStatus status = response.getStatus(); if (status.getStatus_code() != TStatusCode.OK) { errMsg.append("set config for fe[").append(fe.getHost()).append("] failed: "); if (status.getError_msgs() != null && status.getError_msgs().size() > 0) { errMsg.append(String.join(",", status.getError_msgs())); } errMsg.append(";"); } } catch (Exception e) { LOG.warn("set remote fe: {} config failed", fe.getHost(), e); errMsg.append("set config for fe[").append(fe.getHost()).append("] failed: ").append(e.getMessage()); } } if (errMsg.length() > 0) { ErrorReport.reportDdlException(ErrorCode.ERROR_SET_CONFIG_FAILED, errMsg.toString()); } } public void setFrontendConfig(Map<String, String> configs) throws DdlException { for (Map.Entry<String, String> entry : configs.entrySet()) { ConfigBase.setMutableConfig(entry.getKey(), entry.getValue()); } } public Frontend getMySelf() { return frontends.get(nodeName); } public ConcurrentHashMap<String, Frontend> getFrontends() { return frontends; } public long loadBrokers(DataInputStream dis, long checksum) throws IOException { int count = dis.readInt(); checksum ^= count; for (long i = 0; i < count; ++i) { String brokerName = Text.readString(dis); int size = dis.readInt(); checksum ^= size; List<FsBroker> addrs = Lists.newArrayList(); for (int j = 0; j < size; j++) { FsBroker addr = FsBroker.readIn(dis); addrs.add(addr); } brokerMgr.replayAddBrokers(brokerName, addrs); } LOG.info("finished replay brokerMgr from image"); return checksum; } public long saveBrokers(DataOutputStream dos, long checksum) throws 
IOException { Map<String, List<FsBroker>> addressListMap = brokerMgr.getBrokerListMap(); int size = addressListMap.size(); checksum ^= size; dos.writeInt(size); for (Map.Entry<String, List<FsBroker>> entry : addressListMap.entrySet()) { Text.writeString(dos, entry.getKey()); final List<FsBroker> addrs = entry.getValue(); size = addrs.size(); checksum ^= size; dos.writeInt(size); for (FsBroker addr : addrs) { addr.write(dos); } } return checksum; } public long loadLeaderInfo(DataInputStream dis, long checksum) throws IOException { leaderIp = Text.readString(dis); leaderRpcPort = dis.readInt(); long newChecksum = checksum ^ leaderRpcPort; leaderHttpPort = dis.readInt(); newChecksum ^= leaderHttpPort; LOG.info("finished replay masterInfo from image"); return newChecksum; } public long saveLeaderInfo(DataOutputStream dos, long checksum) throws IOException { Text.writeString(dos, leaderIp); checksum ^= leaderRpcPort; dos.writeInt(leaderRpcPort); checksum ^= leaderHttpPort; dos.writeInt(leaderHttpPort); return checksum; } public void save(DataOutputStream dos) throws IOException, SRMetaBlockException { SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.NODE_MGR, 1); writer.writeJson(this); writer.close(); } public void load(SRMetaBlockReader reader) throws IOException, SRMetaBlockException, SRMetaBlockEOFException { NodeMgr nodeMgr = reader.readJson(NodeMgr.class); leaderRpcPort = nodeMgr.leaderRpcPort; leaderHttpPort = nodeMgr.leaderHttpPort; leaderIp = nodeMgr.leaderIp; frontends = nodeMgr.frontends; removedFrontends = nodeMgr.removedFrontends; systemInfo = nodeMgr.systemInfo; systemInfoMap.put(clusterId, systemInfo); brokerMgr = nodeMgr.brokerMgr; } public void setLeaderInfo() { this.leaderIp = FrontendOptions.getLocalHostAddress(); this.leaderRpcPort = Config.rpc_port; this.leaderHttpPort = Config.http_port; LeaderInfo info = new LeaderInfo(this.leaderIp, this.leaderHttpPort, this.leaderRpcPort); 
GlobalStateMgr.getCurrentState().getEditLog().logLeaderInfo(info); } public boolean isFirstTimeStartUp() { return isFirstTimeStartUp; } public boolean isElectable() { return isElectable; } public void setImageDir(String imageDir) { this.imageDir = imageDir; } }
class NodeMgr { private static final Logger LOG = LogManager.getLogger(NodeMgr.class); private static final int HTTP_TIMEOUT_SECOND = 5; /** * LeaderInfo */ @SerializedName(value = "r") private int leaderRpcPort; @SerializedName(value = "h") private int leaderHttpPort; @SerializedName(value = "ip") private String leaderIp; /** * Frontends * <p> * frontends : name -> Frontend * removedFrontends: removed frontends' name. used for checking if name is duplicated in bdbje */ @SerializedName(value = "f") private ConcurrentHashMap<String, Frontend> frontends = new ConcurrentHashMap<>(); @SerializedName(value = "rf") private ConcurrentLinkedQueue<String> removedFrontends = new ConcurrentLinkedQueue<>(); /** * Backends and Compute Node */ @SerializedName(value = "s") private SystemInfoService systemInfo; /** * Broker */ @SerializedName(value = "b") private BrokerMgr brokerMgr; private boolean isFirstTimeStartUp = false; private boolean isElectable; private String nodeName; private FrontendNodeType role; private int clusterId; private String token; private String runMode; private String imageDir; private final List<Pair<String, Integer>> helperNodes = Lists.newArrayList(); private Pair<String, Integer> selfNode = null; private final Map<Integer, SystemInfoService> systemInfoMap = new ConcurrentHashMap<>(); public NodeMgr() { this.role = FrontendNodeType.UNKNOWN; this.leaderRpcPort = 0; this.leaderHttpPort = 0; this.leaderIp = ""; this.systemInfo = new SystemInfoService(); this.brokerMgr = new BrokerMgr(); } public void initialize(String[] args) throws Exception { getCheckedSelfHostPort(); getHelperNodes(args); } private boolean tryLock(boolean mustLock) { return GlobalStateMgr.getCurrentState().tryLock(mustLock); } private void unlock() { GlobalStateMgr.getCurrentState().unlock(); } public List<Frontend> getFrontends(FrontendNodeType nodeType) { if (nodeType == null) { return Lists.newArrayList(frontends.values()); } List<Frontend> result = Lists.newArrayList(); for 
(Frontend frontend : frontends.values()) { if (frontend.getRole() == nodeType) { result.add(frontend); } } return result; } public List<String> getRemovedFrontendNames() { return Lists.newArrayList(removedFrontends); } public SystemInfoService getOrCreateSystemInfo(Integer clusterId) { SystemInfoService systemInfoService = systemInfoMap.get(clusterId); if (systemInfoService == null) { systemInfoService = new SystemInfoService(); systemInfoMap.put(clusterId, systemInfoService); } return systemInfoService; } public SystemInfoService getClusterInfo() { return this.systemInfo; } public BrokerMgr getBrokerMgr() { return brokerMgr; } public void getClusterIdAndRoleOnStartup() throws IOException { File roleFile = new File(this.imageDir, Storage.ROLE_FILE); File versionFile = new File(this.imageDir, Storage.VERSION_FILE); boolean isVersionFileChanged = false; Storage storage = new Storage(this.imageDir); if (isMyself() || (roleFile.exists() && versionFile.exists())) { if (!isMyself()) { LOG.info("find ROLE and VERSION file in local, ignore helper nodes: {}", helperNodes); } if ((roleFile.exists() && !versionFile.exists()) || (!roleFile.exists() && versionFile.exists())) { LOG.error("role file and version file must both exist or both not exist. " + "please specific one helper node to recover. will exit."); System.exit(-1); } if (!roleFile.exists()) { role = FrontendNodeType.FOLLOWER; nodeName = GlobalStateMgr.genFeNodeName(selfNode.first, selfNode.second, false /* new style */); storage.writeFrontendRoleAndNodeName(role, nodeName); LOG.info("very first time to start this node. role: {}, node name: {}", role.name(), nodeName); } else { role = storage.getRole(); nodeName = storage.getNodeName(); if (Strings.isNullOrEmpty(nodeName)) { nodeName = GlobalStateMgr.genFeNodeName(selfNode.first, selfNode.second, true/* old style */); storage.writeFrontendRoleAndNodeName(role, nodeName); LOG.info("forward compatibility. 
role: {}, node name: {}", role.name(), nodeName); } } Preconditions.checkNotNull(role); Preconditions.checkNotNull(nodeName); if (!versionFile.exists()) { clusterId = Config.cluster_id == -1 ? Storage.newClusterID() : Config.cluster_id; token = Strings.isNullOrEmpty(Config.auth_token) ? Storage.newToken() : Config.auth_token; storage = new Storage(clusterId, token, this.imageDir); isVersionFileChanged = true; isFirstTimeStartUp = true; Frontend self = new Frontend(role, nodeName, selfNode.first, selfNode.second); frontends.put(nodeName, self); } else { clusterId = storage.getClusterID(); if (storage.getToken() == null) { token = Strings.isNullOrEmpty(Config.auth_token) ? Storage.newToken() : Config.auth_token; LOG.info("new token={}", token); storage.setToken(token); isVersionFileChanged = true; } else { token = storage.getToken(); } runMode = storage.getRunMode(); isFirstTimeStartUp = false; } } else { while (true) { if (!getFeNodeTypeAndNameFromHelpers()) { LOG.warn("current node is not added to the group. please add it first. 
" + "sleep 5 seconds and retry, current helper nodes: {}", helperNodes); try { Thread.sleep(5000); continue; } catch (InterruptedException e) { LOG.warn(e); System.exit(-1); } } break; } Preconditions.checkState(helperNodes.size() == 1); Preconditions.checkNotNull(role); Preconditions.checkNotNull(nodeName); Pair<String, Integer> rightHelperNode = helperNodes.get(0); storage = new Storage(this.imageDir); if (roleFile.exists() && (role != storage.getRole() || !nodeName.equals(storage.getNodeName())) || !roleFile.exists()) { storage.writeFrontendRoleAndNodeName(role, nodeName); } if (!versionFile.exists()) { if (!getVersionFileFromHelper(rightHelperNode)) { System.exit(-1); } storage = new Storage(this.imageDir); clusterId = storage.getClusterID(); token = storage.getToken(); runMode = storage.getRunMode(); if (Strings.isNullOrEmpty(token)) { token = Config.auth_token; isVersionFileChanged = true; } if (Strings.isNullOrEmpty(runMode)) { runMode = RunMode.SHARED_NOTHING.getName(); storage.setRunMode(runMode); isVersionFileChanged = true; } } else { clusterId = storage.getClusterID(); token = storage.getToken(); runMode = storage.getRunMode(); if (Strings.isNullOrEmpty(runMode)) { runMode = RunMode.SHARED_NOTHING.getName(); storage.setRunMode(runMode); isVersionFileChanged = true; } try { URL idURL = new URL("http: HttpURLConnection conn = null; conn = (HttpURLConnection) idURL.openConnection(); conn.setConnectTimeout(2 * 1000); conn.setReadTimeout(2 * 1000); String clusterIdString = conn.getHeaderField(MetaBaseAction.CLUSTER_ID); int remoteClusterId = Integer.parseInt(clusterIdString); if (remoteClusterId != clusterId) { LOG.error("cluster id is not equal with helper node {}. will exit.", rightHelperNode.first); System.exit(-1); } String remoteToken = conn.getHeaderField(MetaBaseAction.TOKEN); if (token == null && remoteToken != null) { LOG.info("get token from helper node. 
token={}.", remoteToken); token = remoteToken; isVersionFileChanged = true; storage.reload(); } if (Config.enable_token_check) { Preconditions.checkNotNull(token); Preconditions.checkNotNull(remoteToken); if (!token.equals(remoteToken)) { LOG.error("token is not equal with helper node {}. will exit.", rightHelperNode.first); System.exit(-1); } } String remoteRunMode = conn.getHeaderField(MetaBaseAction.RUN_MODE); if (Strings.isNullOrEmpty(remoteRunMode)) { remoteRunMode = RunMode.SHARED_NOTHING.getName(); } if (!runMode.equalsIgnoreCase(remoteRunMode)) { LOG.error("Unmatched run mode with helper node {}: {} vs {}, will exit .", rightHelperNode.first, runMode, remoteRunMode); System.exit(-1); } } catch (Exception e) { LOG.warn("fail to check cluster_id and token with helper node.", e); System.exit(-1); } } getNewImageOnStartup(rightHelperNode, ""); if (RunMode.allowCreateLakeTable()) { String subDir = this.imageDir + StarMgrServer.IMAGE_SUBDIR; File dir = new File(subDir); if (!dir.exists()) { LOG.info("create image dir for {}.", dir.getAbsolutePath()); if (!dir.mkdir()) { LOG.error("create image dir for star mgr failed! exit now."); System.exit(-1); } } getNewImageOnStartup(rightHelperNode, StarMgrServer.IMAGE_SUBDIR); } } if (Config.cluster_id != -1 && clusterId != Config.cluster_id) { LOG.error("cluster id is not equal with config item cluster_id. will exit."); System.exit(-1); } if (Strings.isNullOrEmpty(runMode)) { if (isFirstTimeStartUp) { runMode = RunMode.name(); storage.setRunMode(runMode); isVersionFileChanged = true; } else if (RunMode.allowCreateLakeTable()) { LOG.error("Upgrading from a cluster with version less than 3.0 to a cluster with run mode {} of " + "version 3.0 or above is disallowed. will exit", RunMode.name()); System.exit(-1); } } else if (!runMode.equalsIgnoreCase(RunMode.name())) { LOG.error("Unmatched run mode between config file and version file: {} vs {}. will exit! 
", RunMode.name(), runMode); System.exit(-1); } if (isVersionFileChanged) { storage.writeVersionFile(); } LOG.info("Current run_mode is {}", runMode); isElectable = role.equals(FrontendNodeType.FOLLOWER); systemInfoMap.put(clusterId, systemInfo); Preconditions.checkState(helperNodes.size() == 1); LOG.info("Got cluster id: {}, role: {}, node name: {} and run_mode: {}", clusterId, role.name(), nodeName, runMode); } private boolean getFeNodeTypeAndNameFromHelpers() { Pair<String, Integer> rightHelperNode = null; for (Pair<String, Integer> helperNode : helperNodes) { try { URL url = new URL("http: + "/role?host=" + selfNode.first + "&port=" + selfNode.second); HttpURLConnection conn = null; conn = (HttpURLConnection) url.openConnection(); if (conn.getResponseCode() != 200) { LOG.warn("failed to get fe node type from helper node: {}. response code: {}", helperNode, conn.getResponseCode()); continue; } String type = conn.getHeaderField("role"); if (type == null) { LOG.warn("failed to get fe node type from helper node: {}.", helperNode); continue; } role = FrontendNodeType.valueOf(type); nodeName = conn.getHeaderField("name"); if (role == FrontendNodeType.UNKNOWN) { LOG.warn("frontend {} is not added to cluster yet. role UNKNOWN", selfNode); return false; } if (Strings.isNullOrEmpty(nodeName)) { nodeName = GlobalStateMgr.genFeNodeName(selfNode.first, selfNode.second, true /* old style */); } } catch (Exception e) { LOG.warn("failed to get fe node type from helper node: {}.", helperNode, e); continue; } LOG.info("get fe node type {}, name {} from {}:{}", role, nodeName, helperNode.first, Config.http_port); rightHelperNode = helperNode; break; } if (rightHelperNode == null) { return false; } helperNodes.clear(); helperNodes.add(rightHelperNode); return true; } private boolean isMyself() { Preconditions.checkNotNull(selfNode); Preconditions.checkNotNull(helperNodes); LOG.debug("self: {}. 
helpers: {}", selfNode, helperNodes); boolean containSelf = false; for (Pair<String, Integer> helperNode : helperNodes) { if (selfNode.equals(helperNode)) { containSelf = true; } } if (containSelf) { helperNodes.clear(); helperNodes.add(selfNode); } return containSelf; } public long loadFrontends(DataInputStream dis, long checksum) throws IOException { int size = dis.readInt(); long newChecksum = checksum ^ size; for (int i = 0; i < size; i++) { Frontend fe = Frontend.read(dis); replayAddFrontend(fe); } size = dis.readInt(); newChecksum ^= size; for (int i = 0; i < size; i++) { removedFrontends.add(Text.readString(dis)); } LOG.info("finished replay frontends from image"); return newChecksum; } public long saveFrontends(DataOutputStream dos, long checksum) throws IOException { int size = frontends.size(); checksum ^= size; dos.writeInt(size); for (Frontend fe : frontends.values()) { fe.write(dos); } size = removedFrontends.size(); checksum ^= size; dos.writeInt(size); for (String feName : removedFrontends) { Text.writeString(dos, feName); } return checksum; } public long loadBackends(DataInputStream dis, long checksum) throws IOException { return systemInfo.loadBackends(dis, checksum); } public long saveBackends(DataOutputStream dos, long checksum) throws IOException { return systemInfo.saveBackends(dos, checksum); } public long loadComputeNodes(DataInputStream dis, long checksum) throws IOException { return systemInfo.loadComputeNodes(dis, checksum); } public long saveComputeNodes(DataOutputStream dos, long checksum) throws IOException { return systemInfo.saveComputeNodes(dos, checksum); } private StorageInfo getStorageInfo(URL url) throws IOException { ObjectMapper mapper = new ObjectMapper(); HttpURLConnection connection = null; try { connection = (HttpURLConnection) url.openConnection(); connection.setConnectTimeout(HTTP_TIMEOUT_SECOND * 1000); connection.setReadTimeout(HTTP_TIMEOUT_SECOND * 1000); return mapper.readValue(connection.getInputStream(), 
StorageInfo.class); } finally { if (connection != null) { connection.disconnect(); } } } private void getHelperNodes(String[] args) throws AnalysisException { String helpers = null; for (int i = 0; i < args.length; i++) { if (args[i].equalsIgnoreCase("-helper")) { if (i + 1 >= args.length) { System.out.println("-helper need parameter host:port,host:port"); System.exit(-1); } helpers = args[i + 1]; if (!helpers.contains(":")) { System.out.print("helper's format seems was wrong [" + helpers + "]"); System.out.println(", eg. host:port,host:port"); System.exit(-1); } break; } } if (helpers != null) { String[] splittedHelpers = helpers.split(","); for (String helper : splittedHelpers) { Pair<String, Integer> helperHostPort = SystemInfoService.validateHostAndPort(helper); if (helperHostPort.equals(selfNode)) { /* * If user specified the helper node to this FE itself, * we will stop the starting FE process and report an error. * First, it is meaningless to point the helper to itself. * Secondly, when some users add FE for the first time, they will mistakenly * point the helper that should have pointed to the Master to themselves. * In this case, some errors have caused users to be troubled. * So here directly exit the program and inform the user to avoid unnecessary trouble. */ throw new AnalysisException( "Do not specify the helper node to FE itself. " + "Please specify it to the existing running Leader or Follower FE"); } helperNodes.add(helperHostPort); } } else { helperNodes.add(Pair.create(selfNode.first, Config.edit_log_port)); } LOG.info("get helper nodes: {}", helperNodes); } private void getCheckedSelfHostPort() { selfNode = new Pair<>(FrontendOptions.getLocalHostAddress(), Config.edit_log_port); /* * For the first time, if the master start up failed, it will also fail to restart. * Check port using before create meta files to avoid this problem. */ try { if (NetUtils.isPortUsing(selfNode.first, selfNode.second)) { LOG.error("edit_log_port {} is already in use. 
will exit.", selfNode.second); System.exit(-1); } } catch (UnknownHostException e) { LOG.error(e); System.exit(-1); } LOG.debug("get self node: {}", selfNode); } public Pair<String, Integer> getHelperNode() { Preconditions.checkState(helperNodes.size() >= 1); return this.helperNodes.get(0); } public List<Pair<String, Integer>> getHelperNodes() { return Lists.newArrayList(helperNodes); } /* * If the current node is not in the frontend list, then exit. This may * happen when this node is removed from frontend list, and the drop * frontend log is deleted because of checkpoint. */ public void checkCurrentNodeExist() { if (Config.metadata_failure_recovery.equals("true")) { return; } Frontend fe = checkFeExist(selfNode.first, selfNode.second); if (fe == null) { LOG.error("current node is not added to the cluster, will exit"); System.exit(-1); } else if (fe.getRole() != role) { LOG.error("current node role is {} not match with frontend recorded role {}. will exit", role, fe.getRole()); System.exit(-1); } } private boolean getVersionFileFromHelper(Pair<String, Integer> helperNode) throws IOException { String url = "http: LOG.info("Downloading version file from {}", url); try { File dir = new File(this.imageDir); MetaHelper.getRemoteFile(url, HTTP_TIMEOUT_SECOND * 1000, MetaHelper.getOutputStream(Storage.VERSION_FILE, dir)); MetaHelper.complete(Storage.VERSION_FILE, dir); return true; } catch (Exception e) { LOG.warn("Fail to download version file from {}:{}", url, e.getMessage()); } return false; } /** * When a new node joins in the cluster for the first time, it will download image from the helper at the very beginning * Exception are free to raise on initialized phase */ private void getNewImageOnStartup(Pair<String, Integer> helperNode, String subDir) throws IOException { long localImageVersion = 0; String dirStr = this.imageDir + subDir; Storage storage = new Storage(dirStr); localImageVersion = storage.getImageJournalId(); URL infoUrl = new URL("http: StorageInfo info 
= getStorageInfo(infoUrl); long version = info.getImageJournalId(); if (version > localImageVersion) { String url = "http: + "/image?version=" + version + "&subdir=" + subDir; LOG.info("start to download image.{} from {}", version, url); String filename = Storage.IMAGE + "." + version; File dir = new File(dirStr); MetaHelper.getRemoteFile(url, HTTP_TIMEOUT_SECOND * 1000, MetaHelper.getOutputStream(filename, dir)); MetaHelper.complete(filename, dir); } else { LOG.info("skip download image for {}, current version {} >= version {} from {}", dirStr, localImageVersion, version, helperNode); } } public void addFrontend(FrontendNodeType role, String host, int editLogPort) throws DdlException { if (!tryLock(false)) { throw new DdlException("Failed to acquire globalStateMgr lock. Try again"); } try { try { if (checkFeExistByIpOrFqdn(host)) { throw new DdlException("FE with the same host: " + host + " already exists"); } } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", host, e); throw new DdlException("unknown fqdn host: " + host); } String nodeName = GlobalStateMgr.genFeNodeName(host, editLogPort, false /* new name style */); if (removedFrontends.contains(nodeName)) { throw new DdlException("frontend name already exists " + nodeName + ". 
Try again"); } Frontend fe = new Frontend(role, nodeName, host, editLogPort); frontends.put(nodeName, fe); if (role == FrontendNodeType.FOLLOWER) { helperNodes.add(Pair.create(host, editLogPort)); } if (GlobalStateMgr.getCurrentState().getHaProtocol() instanceof BDBHA) { BDBHA bdbha = (BDBHA) GlobalStateMgr.getCurrentState().getHaProtocol(); if (role == FrontendNodeType.FOLLOWER) { bdbha.addUnstableNode(host, getFollowerCnt()); } bdbha.removeNodeIfExist(host, editLogPort, nodeName); } GlobalStateMgr.getCurrentState().getEditLog().logAddFrontend(fe); } finally { unlock(); } } public void modifyFrontendHost(ModifyFrontendAddressClause modifyFrontendAddressClause) throws DdlException { String toBeModifyHost = modifyFrontendAddressClause.getSrcHost(); String fqdn = modifyFrontendAddressClause.getDestHost(); if (toBeModifyHost.equals(selfNode.first) && role == FrontendNodeType.LEADER) { throw new DdlException("can not modify current master node."); } if (!tryLock(false)) { throw new DdlException("Failed to acquire globalStateMgr lock. 
Try again"); } try { Frontend preUpdateFe = getFeByHost(toBeModifyHost); if (preUpdateFe == null) { throw new DdlException(String.format("frontend [%s] not found", toBeModifyHost)); } Frontend existFe = null; for (Frontend fe : frontends.values()) { if (fe.getHost().equals(fqdn)) { existFe = fe; } } if (null != existFe) { throw new DdlException("frontend with host [" + fqdn + "] already exists "); } BDBHA bdbha = (BDBHA) GlobalStateMgr.getCurrentState().getHaProtocol(); bdbha.updateFrontendHostAndPort(preUpdateFe.getNodeName(), fqdn, preUpdateFe.getEditLogPort()); preUpdateFe.updateHostAndEditLogPort(fqdn, preUpdateFe.getEditLogPort()); frontends.put(preUpdateFe.getNodeName(), preUpdateFe); GlobalStateMgr.getCurrentState().getEditLog().logUpdateFrontend(preUpdateFe); LOG.info("send update fe editlog success, fe info is [{}]", preUpdateFe.toString()); } finally { unlock(); } } public void dropFrontend(FrontendNodeType role, String host, int port) throws DdlException { if (host.equals(selfNode.first) && port == selfNode.second && GlobalStateMgr.getCurrentState().getFeType() == FrontendNodeType.LEADER) { throw new DdlException("can not drop current master node."); } if (!tryLock(false)) { throw new DdlException("Failed to acquire globalStateMgr lock. 
Try again"); } try { Frontend fe = unprotectCheckFeExist(host, port); if (fe == null) { throw new DdlException("frontend does not exist[" + host + ":" + port + "]"); } if (fe.getRole() != role) { throw new DdlException(role.toString() + " does not exist[" + host + ":" + port + "]"); } frontends.remove(fe.getNodeName()); removedFrontends.add(fe.getNodeName()); if (fe.getRole() == FrontendNodeType.FOLLOWER) { GlobalStateMgr.getCurrentState().getHaProtocol().removeElectableNode(fe.getNodeName()); helperNodes.remove(Pair.create(host, port)); BDBHA ha = (BDBHA) GlobalStateMgr.getCurrentState().getHaProtocol(); ha.removeUnstableNode(host, getFollowerCnt()); } GlobalStateMgr.getCurrentState().getEditLog().logRemoveFrontend(fe); } finally { unlock(); } } public void replayAddFrontend(Frontend fe) { tryLock(true); try { Frontend existFe = unprotectCheckFeExist(fe.getHost(), fe.getEditLogPort()); if (existFe != null) { LOG.warn("fe {} already exist.", existFe); if (existFe.getRole() != fe.getRole()) { /* * This may happen if: * 1. first, add a FE as OBSERVER. * 2. This OBSERVER is restarted with ROLE and VERSION file being DELETED. * In this case, this OBSERVER will be started as a FOLLOWER, and add itself to the frontends. * 3. this "FOLLOWER" begin to load image or replay journal, * then find the origin OBSERVER in image or journal. * This will cause UNDEFINED behavior, so it is better to exit and fix it manually. 
*/ System.err.println("Try to add an already exist FE with different role" + fe.getRole()); System.exit(-1); } return; } frontends.put(fe.getNodeName(), fe); if (fe.getRole() == FrontendNodeType.FOLLOWER) { helperNodes.add(Pair.create(fe.getHost(), fe.getEditLogPort())); } } finally { unlock(); } } public void replayUpdateFrontend(Frontend frontend) { tryLock(true); try { Frontend fe = frontends.get(frontend.getNodeName()); if (fe == null) { LOG.error("try to update frontend, but " + frontend.toString() + " does not exist."); return; } fe.updateHostAndEditLogPort(frontend.getHost(), frontend.getEditLogPort()); frontends.put(fe.getNodeName(), fe); LOG.info("update fe successfully, fe info is [{}]", frontend.toString()); } finally { unlock(); } } public void replayDropFrontend(Frontend frontend) { tryLock(true); try { Frontend removedFe = frontends.remove(frontend.getNodeName()); if (removedFe == null) { LOG.error(frontend.toString() + " does not exist."); return; } if (removedFe.getRole() == FrontendNodeType.FOLLOWER) { helperNodes.remove(Pair.create(removedFe.getHost(), removedFe.getEditLogPort())); } removedFrontends.add(removedFe.getNodeName()); } finally { unlock(); } } public Frontend checkFeExist(String host, int port) { tryLock(true); try { return unprotectCheckFeExist(host, port); } finally { unlock(); } } public Frontend unprotectCheckFeExist(String host, int port) { for (Frontend fe : frontends.values()) { if (fe.getHost().equals(host) && fe.getEditLogPort() == port) { return fe; } } return null; } protected boolean checkFeExistByIpOrFqdn(String ipOrFqdn) throws UnknownHostException { Pair<String, String> targetIpAndFqdn = NetUtils.getIpAndFqdnByHost(ipOrFqdn); for (Frontend fe : frontends.values()) { Pair<String, String> curIpAndFqdn; try { curIpAndFqdn = NetUtils.getIpAndFqdnByHost(fe.getHost()); } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", fe.getHost(), e); if (targetIpAndFqdn.second.equals(fe.getHost()) && 
!Strings.isNullOrEmpty(targetIpAndFqdn.second)) { return true; } continue; } if (targetIpAndFqdn.first.equals(curIpAndFqdn.first)) { return true; } if (targetIpAndFqdn.second.equals(curIpAndFqdn.second) && !Strings.isNullOrEmpty(targetIpAndFqdn.second)) { return true; } } return false; } public Frontend getFeByHost(String ipOrFqdn) { Pair<String, String> targetPair; try { targetPair = NetUtils.getIpAndFqdnByHost(ipOrFqdn); } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", e.getMessage()); return null; } for (Frontend fe : frontends.values()) { Pair<String, String> curPair; try { curPair = NetUtils.getIpAndFqdnByHost(fe.getHost()); } catch (UnknownHostException e) { LOG.warn("failed to get right ip by fqdn {}", e.getMessage()); continue; } if (targetPair.first.equals(curPair.first)) { return fe; } if (targetPair.second.equals(curPair.second) && !curPair.second.equals("")) { return fe; } } return null; } public Frontend getFeByName(String name) { for (Frontend fe : frontends.values()) { if (fe.getNodeName().equals(name)) { return fe; } } return null; } public int getFollowerCnt() { int cnt = 0; for (Frontend fe : frontends.values()) { if (fe.getRole() == FrontendNodeType.FOLLOWER) { cnt++; } } return cnt; } public int getClusterId() { return this.clusterId; } public void setClusterId(int clusterId) { this.clusterId = clusterId; } public String getToken() { return token; } public FrontendNodeType getRole() { return this.role; } public Pair<String, Integer> getSelfNode() { return this.selfNode; } public String getNodeName() { return this.nodeName; } public Pair<String, Integer> getLeaderIpAndRpcPort() { if (GlobalStateMgr.getServingState().isReady()) { return new Pair<>(this.leaderIp, this.leaderRpcPort); } else { String leaderNodeName = GlobalStateMgr.getServingState().getHaProtocol().getLeaderNodeName(); Frontend frontend = frontends.get(leaderNodeName); return new Pair<>(frontend.getHost(), frontend.getRpcPort()); } } public 
Pair<String, Integer> getLeaderIpAndHttpPort() { if (GlobalStateMgr.getServingState().isReady()) { return new Pair<>(this.leaderIp, this.leaderHttpPort); } else { String leaderNodeName = GlobalStateMgr.getServingState().getHaProtocol().getLeaderNodeName(); Frontend frontend = frontends.get(leaderNodeName); return new Pair<>(frontend.getHost(), Config.http_port); } } public String getLeaderIp() { if (GlobalStateMgr.getServingState().isReady()) { return this.leaderIp; } else { String leaderNodeName = GlobalStateMgr.getServingState().getHaProtocol().getLeaderNodeName(); return frontends.get(leaderNodeName).getHost(); } } public void setLeader(LeaderInfo info) { this.leaderIp = info.getIp(); this.leaderHttpPort = info.getHttpPort(); this.leaderRpcPort = info.getRpcPort(); } public void updateResourceUsage(long backendId, TResourceUsage usage) { List<Frontend> allFrontends = getFrontends(null); for (Frontend fe : allFrontends) { if (fe.getHost().equals(getSelfNode().first)) { continue; } TUpdateResourceUsageRequest request = new TUpdateResourceUsageRequest(); request.setBackend_id(backendId); request.setResource_usage(usage); try { TUpdateResourceUsageResponse response = FrontendServiceProxy .call(new TNetworkAddress(fe.getHost(), fe.getRpcPort()), Config.thrift_rpc_timeout_ms, Config.thrift_rpc_retry_times, client -> client.updateResourceUsage(request)); if (response.getStatus().getStatus_code() != TStatusCode.OK) { LOG.warn("UpdateResourceUsage to remote fe: {} failed", fe.getHost()); } } catch (Exception e) { LOG.warn("UpdateResourceUsage to remote fe: {} failed", fe.getHost(), e); } } } public void setConfig(AdminSetConfigStmt stmt) throws DdlException { setFrontendConfig(stmt.getConfig().getMap()); List<Frontend> allFrontends = getFrontends(null); int timeout = ConnectContext.get().getSessionVariable().getQueryTimeoutS() * 1000 + Config.thrift_rpc_timeout_ms; StringBuilder errMsg = new StringBuilder(); for (Frontend fe : allFrontends) { if 
(fe.getHost().equals(getSelfNode().first)) { continue; } TSetConfigRequest request = new TSetConfigRequest(); request.setKeys(Lists.newArrayList(stmt.getConfig().getKey())); request.setValues(Lists.newArrayList(stmt.getConfig().getValue())); try { TSetConfigResponse response = FrontendServiceProxy .call(new TNetworkAddress(fe.getHost(), fe.getRpcPort()), timeout, Config.thrift_rpc_retry_times, client -> client.setConfig(request)); TStatus status = response.getStatus(); if (status.getStatus_code() != TStatusCode.OK) { errMsg.append("set config for fe[").append(fe.getHost()).append("] failed: "); if (status.getError_msgs() != null && status.getError_msgs().size() > 0) { errMsg.append(String.join(",", status.getError_msgs())); } errMsg.append(";"); } } catch (Exception e) { LOG.warn("set remote fe: {} config failed", fe.getHost(), e); errMsg.append("set config for fe[").append(fe.getHost()).append("] failed: ").append(e.getMessage()); } } if (errMsg.length() > 0) { ErrorReport.reportDdlException(ErrorCode.ERROR_SET_CONFIG_FAILED, errMsg.toString()); } } public void setFrontendConfig(Map<String, String> configs) throws DdlException { for (Map.Entry<String, String> entry : configs.entrySet()) { ConfigBase.setMutableConfig(entry.getKey(), entry.getValue()); } } public Frontend getMySelf() { return frontends.get(nodeName); } public ConcurrentHashMap<String, Frontend> getFrontends() { return frontends; } public long loadBrokers(DataInputStream dis, long checksum) throws IOException { int count = dis.readInt(); checksum ^= count; for (long i = 0; i < count; ++i) { String brokerName = Text.readString(dis); int size = dis.readInt(); checksum ^= size; List<FsBroker> addrs = Lists.newArrayList(); for (int j = 0; j < size; j++) { FsBroker addr = FsBroker.readIn(dis); addrs.add(addr); } brokerMgr.replayAddBrokers(brokerName, addrs); } LOG.info("finished replay brokerMgr from image"); return checksum; } public long saveBrokers(DataOutputStream dos, long checksum) throws 
IOException { Map<String, List<FsBroker>> addressListMap = brokerMgr.getBrokerListMap(); int size = addressListMap.size(); checksum ^= size; dos.writeInt(size); for (Map.Entry<String, List<FsBroker>> entry : addressListMap.entrySet()) { Text.writeString(dos, entry.getKey()); final List<FsBroker> addrs = entry.getValue(); size = addrs.size(); checksum ^= size; dos.writeInt(size); for (FsBroker addr : addrs) { addr.write(dos); } } return checksum; } public long loadLeaderInfo(DataInputStream dis, long checksum) throws IOException { leaderIp = Text.readString(dis); leaderRpcPort = dis.readInt(); long newChecksum = checksum ^ leaderRpcPort; leaderHttpPort = dis.readInt(); newChecksum ^= leaderHttpPort; LOG.info("finished replay masterInfo from image"); return newChecksum; } public long saveLeaderInfo(DataOutputStream dos, long checksum) throws IOException { Text.writeString(dos, leaderIp); checksum ^= leaderRpcPort; dos.writeInt(leaderRpcPort); checksum ^= leaderHttpPort; dos.writeInt(leaderHttpPort); return checksum; } public void save(DataOutputStream dos) throws IOException, SRMetaBlockException { SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.NODE_MGR, 1); writer.writeJson(this); writer.close(); } public void load(SRMetaBlockReader reader) throws IOException, SRMetaBlockException, SRMetaBlockEOFException { NodeMgr nodeMgr = reader.readJson(NodeMgr.class); leaderRpcPort = nodeMgr.leaderRpcPort; leaderHttpPort = nodeMgr.leaderHttpPort; leaderIp = nodeMgr.leaderIp; frontends = nodeMgr.frontends; removedFrontends = nodeMgr.removedFrontends; systemInfo = nodeMgr.systemInfo; systemInfoMap.put(clusterId, systemInfo); brokerMgr = nodeMgr.brokerMgr; } public void setLeaderInfo() { this.leaderIp = FrontendOptions.getLocalHostAddress(); this.leaderRpcPort = Config.rpc_port; this.leaderHttpPort = Config.http_port; LeaderInfo info = new LeaderInfo(this.leaderIp, this.leaderHttpPort, this.leaderRpcPort); 
GlobalStateMgr.getCurrentState().getEditLog().logLeaderInfo(info); } public boolean isFirstTimeStartUp() { return isFirstTimeStartUp; } public boolean isElectable() { return isElectable; } public void setImageDir(String imageDir) { this.imageDir = imageDir; } }
There are more than one `JandexUtil` used inside the enhancer ? Stange ...
/**
 * Resolves the entity type bound to the given repository interface by walking the type
 * hierarchy of {@code clazz}, and returns it in JVM binary-name form ('.' replaced by '/').
 * Returns {@code null} once {@code java.lang.Object} is reached (end of the hierarchy).
 *
 * @throws IllegalStateException if the repository supertype cannot be found from the class
 */
private String recursivelyFindEntityTypeFromClass(DotName clazz, DotName repositoryDotName) {
    if (clazz.equals(OBJECT_DOT_NAME)) {
        return null;
    }
    List<org.jboss.jandex.Type> resolved =
            io.quarkus.deployment.util.JandexUtil.resolveTypeParameters(clazz, repositoryDotName, indexView);
    if (resolved.isEmpty()) {
        throw new IllegalStateException(
                "Failed to find supertype " + repositoryDotName + " from entity class " + clazz);
    }
    return resolved.get(0).name().toString().replace('.', '/');
}
List<org.jboss.jandex.Type> typeParameters = io.quarkus.deployment.util.JandexUtil
// Resolves the entity type parameter of the given repository supertype for clazz, returning
// it as a JVM binary name ('.' replaced by '/'). Returns null once java.lang.Object is
// reached (no repository supertype found on this branch of the hierarchy).
private String recursivelyFindEntityTypeFromClass(DotName clazz, DotName repositoryDotName) {
    if (clazz.equals(OBJECT_DOT_NAME)) {
        return null;
    }
    // NOTE(review): the fully-qualified io.quarkus.deployment.util.JandexUtil here is a
    // different class from the unqualified JandexUtil used elsewhere in this enhancer —
    // consider unifying the two.
    List<org.jboss.jandex.Type> typeParameters = io.quarkus.deployment.util.JandexUtil
            .resolveTypeParameters(clazz, repositoryDotName, indexView);
    if (typeParameters.isEmpty())
        throw new IllegalStateException(
                "Failed to find supertype " + repositoryDotName + " from entity class " + clazz);
    org.jboss.jandex.Type entityType = typeParameters.get(0);
    return entityType.name().toString().replace('.', '/');
}
/**
 * ASM class visitor that enhances user classes implementing a Panache repository interface:
 * for every method of the repository base class annotated with {@code @GenerateBridge}, a
 * synthetic bridge method is generated that forwards to the static Panache operations class,
 * substituting the concrete entity type for the "Entity" type variable.
 *
 * NOTE(review): declares abstract methods but is not marked {@code abstract} — presumably an
 * extraction artifact; confirm against the original source.
 */
class PanacheRepositoryClassVisitor extends ClassVisitor {

    // Concrete entity type of the repository being enhanced, resolved in visit().
    protected Type entityType;
    // JVM type signature of the entity, e.g. "Lcom/acme/Person;".
    protected String entitySignature;
    // Entity type in binary form, e.g. "com/acme/Person".
    protected String entityBinaryType;
    // Binary name of the repository (DAO) class being visited.
    protected String daoBinaryName;
    // Class info of the repository base interface whose @GenerateBridge methods drive generation.
    protected ClassInfo panacheRepositoryBaseClassInfo;
    // Jandex index used to resolve the entity type parameter of the repository.
    protected IndexView indexView;

    public PanacheRepositoryClassVisitor(String className, ClassVisitor outputClassVisitor,
            ClassInfo panacheRepositoryBaseClassInfo, IndexView indexView) {
        super(Opcodes.ASM7, outputClassVisitor);
        daoBinaryName = className.replace('.', '/');
        this.panacheRepositoryBaseClassInfo = panacheRepositoryBaseClassInfo;
        this.indexView = indexView;
    }

    // DotName of the concrete repository interface.
    protected abstract DotName getPanacheRepositoryDotName();

    // DotName of the repository base interface.
    protected abstract DotName getPanacheRepositoryBaseDotName();

    // Binary name of the static operations class the generated bridges forward to.
    protected abstract String getPanacheOperationsBinaryName();

    // Descriptor of the model argument injected as the first forwarded parameter.
    protected abstract String getModelDescriptor();

    // Emits the bytecode that pushes the model argument before the forwarded parameters.
    protected abstract void injectModel(MethodVisitor mv);

    @Override
    public void visit(int version, int access, String name, String signature, String superName,
            String[] interfaces) {
        super.visit(version, access, name, signature, superName, interfaces);
        final String repositoryClassName = name.replace('/', '.');
        // Try the repository interface first, then fall back to the base interface.
        String foundEntityType = findEntityBinaryTypeForPanacheRepository(repositoryClassName,
                getPanacheRepositoryDotName());
        if (foundEntityType == null) {
            foundEntityType = findEntityBinaryTypeForPanacheRepository(repositoryClassName,
                    getPanacheRepositoryBaseDotName());
        }
        entityBinaryType = foundEntityType;
        entitySignature = "L" + entityBinaryType + ";";
        entityType = Type.getType(entitySignature);
    }

    // Looks up the visited class among the known implementors of the given repository
    // interface and resolves its entity binary type; null if the class does not implement it.
    private String findEntityBinaryTypeForPanacheRepository(String repositoryClassName,
            DotName repositoryDotName) {
        for (ClassInfo classInfo : indexView.getAllKnownImplementors(repositoryDotName)) {
            if (repositoryClassName.equals(classInfo.name().toString())) {
                return recursivelyFindEntityTypeFromClass(classInfo.name(), repositoryDotName);
            }
        }
        return null;
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String descriptor, String signature,
            String[] exceptions) {
        return super.visitMethod(access, name, descriptor, signature, exceptions);
    }

    @Override
    public void visitEnd() {
        // Generate one bridge for every base-class method marked @GenerateBridge.
        for (MethodInfo method : panacheRepositoryBaseClassInfo.methods()) {
            AnnotationInstance bridge = method.annotation(JandexUtil.DOTNAME_GENERATE_BRIDGE);
            if (bridge != null)
                generateMethod(method, bridge.value("targetReturnTypeErased"));
        }
        super.visitEnd();
    }

    // Emits a synthetic bridge method forwarding to the static operations class via
    // INVOKESTATIC, with the "Entity" type variable substituted by the concrete entity type.
    private void generateMethod(MethodInfo method, AnnotationValue targetReturnTypeErased) {
        String descriptor = JandexUtil.getDescriptor(method,
                name -> name.equals("Entity") ? entitySignature : null);
        String signature = JandexUtil.getSignature(method,
                name -> name.equals("Entity") ? entitySignature : null);
        List<org.jboss.jandex.Type> parameters = method.parameters();
        String castTo = null;
        // With an erased target return type, the forwarded call returns Object and must be
        // cast back to the declared (possibly entity) type.
        if (targetReturnTypeErased != null && targetReturnTypeErased.asBoolean()) {
            org.jboss.jandex.Type type = method.returnType();
            if (type.kind() == Kind.TYPE_VARIABLE) {
                if (type.asTypeVariable().identifier().equals("Entity"))
                    castTo = entityBinaryType;
            }
            if (castTo == null)
                castTo = type.name().toString('/');
        }
        MethodVisitor mv = super.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                method.name(), descriptor, signature, null);
        for (int i = 0; i < parameters.size(); i++) {
            mv.visitParameter(method.parameterName(i), 0 /* modifiers */);
        }
        mv.visitCode();
        injectModel(mv);
        // Load each declared parameter; slot 0 holds "this", hence i + 1.
        for (int i = 0; i < parameters.size(); i++) {
            mv.visitIntInsn(Opcodes.ALOAD, i + 1);
        }
        // Prepend the model argument to the forwarded descriptor.
        String forwardingDescriptor = "(" + getModelDescriptor() + descriptor.substring(1);
        if (castTo != null) {
            int lastParen = forwardingDescriptor.lastIndexOf(')');
            forwardingDescriptor = forwardingDescriptor.substring(0, lastParen + 1) + "Ljava/lang/Object;";
        }
        mv.visitMethodInsn(Opcodes.INVOKESTATIC, getPanacheOperationsBinaryName(), method.name(),
                forwardingDescriptor, false);
        if (castTo != null)
            mv.visitTypeInsn(Opcodes.CHECKCAST, castTo);
        String returnTypeDescriptor = descriptor.substring(descriptor.lastIndexOf(")") + 1);
        mv.visitInsn(JandexUtil.getReturnInstruction(returnTypeDescriptor));
        mv.visitMaxs(0, 0);
        mv.visitEnd();
    }
}
/**
 * ASM class visitor enhancing user classes that implement a Panache repository interface.
 * Every repository-base method annotated with {@code @GenerateBridge} gets a generated
 * synthetic bridge that forwards to the static Panache operations class, with the "Entity"
 * type variable replaced by the concrete entity type.
 *
 * NOTE(review): declares abstract methods but is not marked {@code abstract} — presumably an
 * extraction artifact; confirm against the original source.
 */
class PanacheRepositoryClassVisitor extends ClassVisitor {

    // Concrete entity type of the repository, resolved in visit().
    protected Type entityType;
    // JVM type signature of the entity, e.g. "Lcom/acme/Person;".
    protected String entitySignature;
    // Entity type in binary form, e.g. "com/acme/Person".
    protected String entityBinaryType;
    // Binary name of the repository (DAO) class being visited.
    protected String daoBinaryName;
    // Class info of the repository base interface whose @GenerateBridge methods drive generation.
    protected ClassInfo panacheRepositoryBaseClassInfo;
    // Jandex index used to resolve the entity type parameter of the repository.
    protected IndexView indexView;

    public PanacheRepositoryClassVisitor(String className, ClassVisitor outputClassVisitor,
            ClassInfo panacheRepositoryBaseClassInfo, IndexView indexView) {
        super(Opcodes.ASM7, outputClassVisitor);
        daoBinaryName = className.replace('.', '/');
        this.panacheRepositoryBaseClassInfo = panacheRepositoryBaseClassInfo;
        this.indexView = indexView;
    }

    // DotName of the concrete repository interface.
    protected abstract DotName getPanacheRepositoryDotName();

    // DotName of the repository base interface.
    protected abstract DotName getPanacheRepositoryBaseDotName();

    // Binary name of the static operations class the generated bridges forward to.
    protected abstract String getPanacheOperationsBinaryName();

    // Descriptor of the model argument injected as the first forwarded parameter.
    protected abstract String getModelDescriptor();

    // Emits the bytecode that pushes the model argument before the forwarded parameters.
    protected abstract void injectModel(MethodVisitor mv);

    @Override
    public void visit(int version, int access, String name, String signature, String superName,
            String[] interfaces) {
        super.visit(version, access, name, signature, superName, interfaces);
        final String repositoryClassName = name.replace('/', '.');
        // Try the repository interface first, then fall back to the base interface.
        String foundEntityType = findEntityBinaryTypeForPanacheRepository(repositoryClassName,
                getPanacheRepositoryDotName());
        if (foundEntityType == null) {
            foundEntityType = findEntityBinaryTypeForPanacheRepository(repositoryClassName,
                    getPanacheRepositoryBaseDotName());
        }
        entityBinaryType = foundEntityType;
        entitySignature = "L" + entityBinaryType + ";";
        entityType = Type.getType(entitySignature);
    }

    // Looks up the visited class among the known implementors of the given repository
    // interface and resolves its entity binary type; null if the class does not implement it.
    private String findEntityBinaryTypeForPanacheRepository(String repositoryClassName,
            DotName repositoryDotName) {
        for (ClassInfo classInfo : indexView.getAllKnownImplementors(repositoryDotName)) {
            if (repositoryClassName.equals(classInfo.name().toString())) {
                return recursivelyFindEntityTypeFromClass(classInfo.name(), repositoryDotName);
            }
        }
        return null;
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String descriptor, String signature,
            String[] exceptions) {
        return super.visitMethod(access, name, descriptor, signature, exceptions);
    }

    @Override
    public void visitEnd() {
        // Generate one bridge for every base-class method marked @GenerateBridge.
        for (MethodInfo method : panacheRepositoryBaseClassInfo.methods()) {
            AnnotationInstance bridge = method.annotation(JandexUtil.DOTNAME_GENERATE_BRIDGE);
            if (bridge != null)
                generateMethod(method, bridge.value("targetReturnTypeErased"));
        }
        super.visitEnd();
    }

    // Emits a synthetic bridge method forwarding to the static operations class via
    // INVOKESTATIC, with the "Entity" type variable substituted by the concrete entity type.
    private void generateMethod(MethodInfo method, AnnotationValue targetReturnTypeErased) {
        String descriptor = JandexUtil.getDescriptor(method,
                name -> name.equals("Entity") ? entitySignature : null);
        String signature = JandexUtil.getSignature(method,
                name -> name.equals("Entity") ? entitySignature : null);
        List<org.jboss.jandex.Type> parameters = method.parameters();
        String castTo = null;
        // With an erased target return type, the forwarded call returns Object and must be
        // cast back to the declared (possibly entity) type.
        if (targetReturnTypeErased != null && targetReturnTypeErased.asBoolean()) {
            org.jboss.jandex.Type type = method.returnType();
            if (type.kind() == Kind.TYPE_VARIABLE) {
                if (type.asTypeVariable().identifier().equals("Entity"))
                    castTo = entityBinaryType;
            }
            if (castTo == null)
                castTo = type.name().toString('/');
        }
        MethodVisitor mv = super.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                method.name(), descriptor, signature, null);
        for (int i = 0; i < parameters.size(); i++) {
            mv.visitParameter(method.parameterName(i), 0 /* modifiers */);
        }
        mv.visitCode();
        injectModel(mv);
        // Load each declared parameter; slot 0 holds "this", hence i + 1.
        for (int i = 0; i < parameters.size(); i++) {
            mv.visitIntInsn(Opcodes.ALOAD, i + 1);
        }
        // Prepend the model argument to the forwarded descriptor.
        String forwardingDescriptor = "(" + getModelDescriptor() + descriptor.substring(1);
        if (castTo != null) {
            int lastParen = forwardingDescriptor.lastIndexOf(')');
            forwardingDescriptor = forwardingDescriptor.substring(0, lastParen + 1) + "Ljava/lang/Object;";
        }
        mv.visitMethodInsn(Opcodes.INVOKESTATIC, getPanacheOperationsBinaryName(), method.name(),
                forwardingDescriptor, false);
        if (castTo != null)
            mv.visitTypeInsn(Opcodes.CHECKCAST, castTo);
        String returnTypeDescriptor = descriptor.substring(descriptor.lastIndexOf(")") + 1);
        mv.visitInsn(JandexUtil.getReturnInstruction(returnTypeDescriptor));
        mv.visitMaxs(0, 0);
        mv.visitEnd();
    }
}
The variable `describeStatement` should be named `result`, for consistency with the other visitor methods in this class.
/**
 * Builds a DescribeStatement for a {@code DESC <table>} statement.
 *
 * @param ctx parse-tree context of the DESC statement
 * @return the populated describe statement
 */
public ASTNode visitDesc(final DescContext ctx) {
    // Local names follow the convention used by every other visit method in this visitor:
    // the returned statement is "result" and the table segment is "table".
    TableSegment table = (TableSegment) visit(ctx.tableName());
    DescribeStatement result = new DescribeStatement();
    result.setTableName(table);
    return result;
}
DescribeStatement describeStatement = new DescribeStatement();
// Builds the DescribeStatement for a DESC <table> statement; the "result"/"table" naming
// matches the convention used by the other visit methods in this visitor.
public ASTNode visitDesc(final DescContext ctx) {
    TableSegment table = (TableSegment) visit(ctx.tableName());
    DescribeStatement result = new DescribeStatement();
    result.setTable(table);
    return result;
}
class MySQLVisitor extends MySQLStatementBaseVisitor<ASTNode> implements SQLVisitor { private int currentParameterIndex; @Override public ASTNode visitUse(final UseContext ctx) { LiteralValue schema = (LiteralValue) visit(ctx.schemaName()); UseStatement useStatement = new UseStatement(); useStatement.setSchema(schema.getLiteral()); return useStatement; } @Override @Override public ASTNode visitShowDatabases(final ShowDatabasesContext ctx) { ShowDatabasesStatement showDatabasesStatement = new ShowDatabasesStatement(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); showDatabasesStatement.getAllSQLSegments().add(showLikeSegment); } return showDatabasesStatement; } @Override public ASTNode visitShowTables(final ShowTablesContext ctx) { ShowTablesStatement showTablesStatement = new ShowTablesStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); showTablesStatement.getAllSQLSegments().add(fromSchemaSegment); } if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); showTablesStatement.getAllSQLSegments().add(showLikeSegment); } return showTablesStatement; } @Override public ASTNode visitShowTableStatus(final ShowTableStatusContext ctx) { ShowTableStatusStatement showTableStatusStatement = new ShowTableStatusStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); showTableStatusStatement.getAllSQLSegments().add(fromSchemaSegment); } if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); 
showTableStatusStatement.getAllSQLSegments().add(showLikeSegment); } return showTableStatusStatement; } @Override public ASTNode visitShowColumns(final ShowColumnsContext ctx) { ShowColumnsStatement showColumnsStatement = new ShowColumnsStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); showColumnsStatement.getAllSQLSegments().add(fromSchemaSegment); } if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); showColumnsStatement.getAllSQLSegments().add(showLikeSegment); } return showColumnsStatement; } @Override public ASTNode visitShowIndex(final ShowIndexContext ctx) { ShowIndexStatement showIndexStatement = new ShowIndexStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); showIndexStatement.getAllSQLSegments().add(fromSchemaSegment); } return showIndexStatement; } @Override public ASTNode visitShowCreateTable(final ShowCreateTableContext ctx) { ShowCreateTableStatement showCreateTableStatement = new ShowCreateTableStatement(); TableSegment tablename = (TableSegment) visit(ctx.tableName()); showCreateTableStatement.setTableName(tablename); return showCreateTableStatement; } @Override public ASTNode visitFromSchema(final FromSchemaContext ctx) { return new FromSchemaSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex()); } @Override public ASTNode visitShowLike(final ShowLikeContext ctx) { LiteralValue literalValue = (LiteralValue) visit(ctx.stringLiterals()); return new ShowLikeSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), literalValue.getLiteral()); } @Override public ASTNode visitCreateUser(final CreateUserContext ctx) { return new DCLStatement(); } @Override 
public ASTNode visitCreateRole(final CreateRoleContext ctx) { return new DCLStatement(); } @Override public ASTNode visitDropUser(final DropUserContext ctx) { return new DCLStatement(); } @Override public ASTNode visitAlterUser(final AlterUserContext ctx) { return new DCLStatement(); } @Override public ASTNode visitRenameUser(final RenameUserContext ctx) { return new DCLStatement(); } @Override public ASTNode visitSetPassword(final SetPasswordContext ctx) { return new DCLStatement(); } @Override public ASTNode visitCreateTable(final CreateTableContext ctx) { CreateTableStatement result = new CreateTableStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); CreateDefinitionClause_Context createDefinitionClause = ctx.createDefinitionClause_(); if (null != createDefinitionClause) { for (CreateDefinition_Context createDefinition : createDefinitionClause.createDefinitions_().createDefinition_()) { ColumnDefinitionContext columnDefinition = createDefinition.columnDefinition(); if (null != columnDefinition) { ColumnDefinitionSegment columnDefinitionSegment = createColumnDefinitionSegment(columnDefinition, result); result.getColumnDefinitions().add(columnDefinitionSegment); result.getAllSQLSegments().add(columnDefinitionSegment); } ConstraintDefinition_Context constraintDefinition = createDefinition.constraintDefinition_(); ForeignKeyOption_Context foreignKeyOption = null == constraintDefinition ? 
null : constraintDefinition.foreignKeyOption_(); if (null != foreignKeyOption) { result.getAllSQLSegments().add((TableSegment) visit(foreignKeyOption.referenceDefinition_().tableName())); } } } CreateLikeClause_Context createLikeClause = ctx.createLikeClause_(); if (null != createLikeClause) { result.getAllSQLSegments().add((TableSegment) visit(createLikeClause)); } return result; } @Override public ASTNode visitAlterTable(final AlterTableContext ctx) { AlterTableStatement result = new AlterTableStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); if (null != ctx.alterDefinitionClause_()) { for (AlterSpecification_Context alterSpecification : ctx.alterDefinitionClause_().alterSpecification_()) { AddColumnSpecificationContext addColumnSpecification = alterSpecification.addColumnSpecification(); if (null != addColumnSpecification) { List<ColumnDefinitionContext> columnDefinitions = addColumnSpecification.columnDefinition(); ColumnDefinitionSegment columnDefinitionSegment = null; for (ColumnDefinitionContext columnDefinition : columnDefinitions) { columnDefinitionSegment = createColumnDefinitionSegment(columnDefinition, result); result.getAddedColumnDefinitions().add(columnDefinitionSegment); result.getAllSQLSegments().add(columnDefinitionSegment); } createColumnPositionSegment(addColumnSpecification.firstOrAfterColumn(), columnDefinitionSegment, result); } AddConstraintSpecificationContext addConstraintSpecification = alterSpecification.addConstraintSpecification(); ForeignKeyOption_Context foreignKeyOption = null == addConstraintSpecification ? 
null : addConstraintSpecification.constraintDefinition_().foreignKeyOption_(); if (null != foreignKeyOption) { result.getAllSQLSegments().add((TableSegment) visit(foreignKeyOption.referenceDefinition_().tableName())); } ChangeColumnSpecificationContext changeColumnSpecification = alterSpecification.changeColumnSpecification(); if (null != changeColumnSpecification) { createColumnPositionSegment(changeColumnSpecification.firstOrAfterColumn(), createColumnDefinitionSegment(changeColumnSpecification.columnDefinition(), result), result); } DropColumnSpecificationContext dropColumnSpecification = alterSpecification.dropColumnSpecification(); if (null != dropColumnSpecification) { result.getDroppedColumnNames().add(((ColumnSegment) visit(dropColumnSpecification)).getName()); } ModifyColumnSpecificationContext modifyColumnSpecification = alterSpecification.modifyColumnSpecification(); if (null != modifyColumnSpecification) { createColumnPositionSegment(modifyColumnSpecification.firstOrAfterColumn(), createColumnDefinitionSegment(modifyColumnSpecification.columnDefinition(), result), result); } } } return result; } @Override public ASTNode visitDropTable(final DropTableContext ctx) { DropTableStatement result = new DropTableStatement(); ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.tableNames()); result.getTables().addAll(tables.getValues()); result.getAllSQLSegments().addAll(tables.getValues()); return result; } @Override public ASTNode visitTruncateTable(final TruncateTableContext ctx) { DDLStatement result = new DDLStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.getAllSQLSegments().add(table); return result; } @Override public ASTNode visitCreateIndex(final CreateIndexContext ctx) { CreateIndexStatement result = new CreateIndexStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); return result; } @Override public ASTNode 
visitDropIndex(final DropIndexContext ctx) { DropIndexStatement result = new DropIndexStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); return result; } @Override public ASTNode visitIndexDefinition_(final IndexDefinition_Context ctx) { return visit(ctx.indexName()); } @Override public ASTNode visitCreateLikeClause_(final CreateLikeClause_Context ctx) { return visit(ctx.tableName()); } @Override public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) { return visit(ctx.columnName()); } @Override public ASTNode visitInsert(final InsertContext ctx) { InsertStatement result; if (null != ctx.insertValuesClause()) { result = (InsertStatement) visit(ctx.insertValuesClause()); } else { result = new InsertStatement(); SetAssignmentSegment segment = (SetAssignmentSegment) visit(ctx.setAssignmentsClause()); result.setSetAssignment(segment); result.getAllSQLSegments().add(segment); } if (null != ctx.onDuplicateKeyClause()) { ListValue<AssignmentSegment> segments = (ListValue<AssignmentSegment>) visit(ctx.onDuplicateKeyClause()); result.getAllSQLSegments().addAll(segments.getValues()); } TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitInsertValuesClause(final InsertValuesClauseContext ctx) { InsertStatement result = new InsertStatement(); if (null != ctx.columnNames()) { InsertColumnsSegment insertColumnsSegment = (InsertColumnsSegment) visit(ctx.columnNames()); result.setColumns(insertColumnsSegment); result.getAllSQLSegments().add(insertColumnsSegment); } Collection<InsertValuesSegment> insertValuesSegments = createInsertValuesSegments(ctx.assignmentValues()); result.getValues().addAll(insertValuesSegments); result.getAllSQLSegments().addAll(insertValuesSegments); return result; } @Override 
public ASTNode visitOnDuplicateKeyClause(final OnDuplicateKeyClauseContext ctx) { ListValue<AssignmentSegment> result = new ListValue<>(new LinkedList<AssignmentSegment>()); for (AssignmentContext each : ctx.assignment()) { result.getValues().add((AssignmentSegment) visit(each)); } return result; } @Override public ASTNode visitUpdate(final UpdateContext ctx) { UpdateStatement result = new UpdateStatement(); ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.tableReferences()); SetAssignmentSegment setSegment = (SetAssignmentSegment) visit(ctx.setAssignmentsClause()); result.getTables().addAll(tables.getValues()); result.setSetAssignment(setSegment); result.getAllSQLSegments().addAll(tables.getValues()); result.getAllSQLSegments().add(setSegment); if (null != ctx.whereClause()) { WhereSegment whereSegment = (WhereSegment) visit(ctx.whereClause()); result.setWhere(whereSegment); result.getAllSQLSegments().add(whereSegment); } result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitSetAssignmentsClause(final SetAssignmentsClauseContext ctx) { Collection<AssignmentSegment> assignments = new LinkedList<>(); for (AssignmentContext each : ctx.assignment()) { assignments.add((AssignmentSegment) visit(each)); } return new SetAssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), assignments); } @Override public ASTNode visitAssignmentValues(final AssignmentValuesContext ctx) { List<ExpressionSegment> segments = new LinkedList<>(); for (AssignmentValueContext each : ctx.assignmentValue()) { segments.add((ExpressionSegment) visit(each)); } return new InsertValuesSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), segments); } @Override public ASTNode visitAssignment(final AssignmentContext ctx) { ColumnSegment column = (ColumnSegment) visitColumnName(ctx.columnName()); ExpressionSegment value = (ExpressionSegment) visit(ctx.assignmentValue()); return new 
AssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), column, value); } @Override public ASTNode visitAssignmentValue(final AssignmentValueContext ctx) { ExprContext expr = ctx.expr(); if (null != expr) { return visit(expr); } return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitBlobValue(final BlobValueContext ctx) { return new LiteralValue(ctx.STRING_().getText()); } @Override public ASTNode visitDelete(final DeleteContext ctx) { DeleteStatement result = new DeleteStatement(); if (null != ctx.multipleTablesClause()) { ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.multipleTablesClause()); result.getTables().addAll(tables.getValues()); result.getAllSQLSegments().addAll(tables.getValues()); } else { TableSegment table = (TableSegment) visit(ctx.singleTableClause()); result.getTables().add(table); result.getAllSQLSegments().add(table); } if (null != ctx.whereClause()) { WhereSegment where = (WhereSegment) visit(ctx.whereClause()); result.setWhere(where); result.getAllSQLSegments().add(where); } result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitSingleTableClause(final SingleTableClauseContext ctx) { TableSegment result = (TableSegment) visit(ctx.tableName()); if (null != ctx.alias()) { result.setAlias(ctx.alias().getText()); } return result; } @Override public ASTNode visitMultipleTablesClause(final MultipleTablesClauseContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); result.combine((ListValue<TableSegment>) visit(ctx.multipleTableNames())); result.combine((ListValue<TableSegment>) visit(ctx.tableReferences())); return result; } @Override public ASTNode visitMultipleTableNames(final MultipleTableNamesContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); for (TableNameContext each : ctx.tableName()) { 
result.getValues().add((TableSegment) visit(each)); } return result; } @Override public ASTNode visitSelect(final SelectContext ctx) { SelectStatement result = (SelectStatement) visit(ctx.unionClause()); result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitUnionClause(final UnionClauseContext ctx) { return visit(ctx.selectClause(0)); } @Override public ASTNode visitSelectClause(final SelectClauseContext ctx) { SelectStatement result = new SelectStatement(); ProjectionsSegment projections = (ProjectionsSegment) visit(ctx.projections()); result.setProjections(projections); result.getAllSQLSegments().add(projections); if (null != ctx.selectSpecification()) { result.getProjections().setDistinctRow(isDistinct(ctx)); } if (null != ctx.fromClause()) { ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.fromClause()); result.getTables().addAll(tables.getValues()); result.getAllSQLSegments().addAll(tables.getValues()); } if (null != ctx.whereClause()) { WhereSegment where = (WhereSegment) visit(ctx.whereClause()); result.setWhere(where); result.getAllSQLSegments().add(where); } if (null != ctx.orderByClause()) { OrderBySegment orderBy = (OrderBySegment) visit(ctx.orderByClause()); result.setOrderBy(orderBy); result.getAllSQLSegments().add(orderBy); } return result; } @Override public ASTNode visitSelectSpecification(final SelectSpecificationContext ctx) { if (null != ctx.duplicateSpecification()) { return visit(ctx.duplicateSpecification()); } return new BooleanValue(false); } @Override public ASTNode visitDuplicateSpecification(final DuplicateSpecificationContext ctx) { String text = ctx.getText(); if ("DISTINCT".equalsIgnoreCase(text) || "DISTINCTROW".equalsIgnoreCase(text)) { return new BooleanValue(true); } return new BooleanValue(false); } @Override public ASTNode visitProjections(final ProjectionsContext ctx) { Collection<ProjectionSegment> projections = new LinkedList<>(); if (null != 
ctx.unqualifiedShorthand()) { projections.add( new ShorthandProjectionSegment(ctx.unqualifiedShorthand().getStart().getStartIndex(), ctx.unqualifiedShorthand().getStop().getStopIndex(), ctx.unqualifiedShorthand().getText())); } for (ProjectionContext each : ctx.projection()) { projections.add((ProjectionSegment) visit(each)); } ProjectionsSegment result = new ProjectionsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.getProjections().addAll(projections); return result; } @Override public ASTNode visitProjection(final ProjectionContext ctx) { if (null != ctx.qualifiedShorthand()) { QualifiedShorthandContext shorthand = ctx.qualifiedShorthand(); ShorthandProjectionSegment result = new ShorthandProjectionSegment(shorthand.getStart().getStartIndex(), shorthand.getStop().getStopIndex(), shorthand.getText()); result.setOwner(new TableSegment(shorthand.identifier().getStart().getStartIndex(), shorthand.identifier().getStop().getStopIndex(), shorthand.identifier().getText())); return result; } String alias = null == ctx.alias() ? null : ctx.alias().getText(); if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); ColumnProjectionSegment result = new ColumnProjectionSegment(ctx.columnName().getText(), column); result.setAlias(alias); return result; } LiteralExpressionSegment column = (LiteralExpressionSegment) visit(ctx.expr()); ExpressionProjectionSegment result = Strings.isNullOrEmpty(alias) ? 
new ExpressionProjectionSegment(column.getStartIndex(), column.getStopIndex(), String.valueOf(column.getLiterals())) : new ExpressionProjectionSegment(column.getStartIndex(), ctx.alias().stop.getStopIndex(), String.valueOf(column.getLiterals())); result.setAlias(alias); return result; } @Override public ASTNode visitFromClause(final FromClauseContext ctx) { return visit(ctx.tableReferences()); } @Override public ASTNode visitTableReferences(final TableReferencesContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); for (EscapedTableReferenceContext each : ctx.escapedTableReference()) { result.combine((ListValue<TableSegment>) visit(each)); } return result; } @Override public ASTNode visitEscapedTableReference(final EscapedTableReferenceContext ctx) { return visit(ctx.tableReference()); } @Override public ASTNode visitTableReference(final TableReferenceContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); if (null != ctx.joinedTable()) { for (JoinedTableContext each : ctx.joinedTable()) { result.getValues().add((TableSegment) visit(each)); } } if (null != ctx.tableFactor()) { result.getValues().add((TableSegment) visit(ctx.tableFactor())); } return result; } @Override public ASTNode visitTableFactor(final TableFactorContext ctx) { if (null != ctx.tableReferences()) { return visit(ctx.tableReferences()); } TableSegment table = (TableSegment) visit(ctx.tableName()); if (null != ctx.alias()) { table.setAlias(ctx.alias().getText()); } return table; } @Override public ASTNode visitJoinedTable(final JoinedTableContext ctx) { return visit(ctx.tableFactor()); } @Override public ASTNode visitWhereClause(final WhereClauseContext ctx) { WhereSegment result = new WhereSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.setParameterMarkerStartIndex(currentParameterIndex); ASTNode segment = visit(ctx.expr()); if (segment instanceof OrPredicateSegment) { 
result.getAndPredicates().addAll(((OrPredicateSegment) segment).getAndPredicates()); } else if (segment instanceof PredicateSegment) { AndPredicate andPredicate = new AndPredicate(); andPredicate.getPredicates().add((PredicateSegment) segment); result.getAndPredicates().add(andPredicate); } result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitSetTransaction(final SetTransactionContext ctx) { return new SetTransactionStatement(); } @Override public ASTNode visitSetAutoCommit(final SetAutoCommitContext ctx) { SetAutoCommitStatement result = new SetAutoCommitStatement(); AutoCommitValueContext autoCommitValueContext = ctx.autoCommitValue(); if (null != autoCommitValueContext) { AutoCommitSegment autoCommitSegment = (AutoCommitSegment) visit(ctx.autoCommitValue()); result.getAllSQLSegments().add(autoCommitSegment); result.setAutoCommit(autoCommitSegment.isAutoCommit()); } return result; } @Override public ASTNode visitAutoCommitValue(final AutoCommitValueContext ctx) { boolean autoCommit = "1".equals(ctx.getText()) || "ON".equals(ctx.getText()); return new AutoCommitSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), autoCommit); } @Override public ASTNode visitBeginTransaction(final BeginTransactionContext ctx) { return new BeginTransactionStatement(); } @Override public ASTNode visitCommit(final CommitContext ctx) { return new CommitStatement(); } @Override public ASTNode visitRollback(final RollbackContext ctx) { return new RollbackStatement(); } @Override public ASTNode visitSavepoint(final SavepointContext ctx) { return new SavepointStatement(); } @Override public ASTNode visitSchemaName(final SchemaNameContext ctx) { return visit(ctx.identifier()); } @Override public ASTNode visitTableNames(final TableNamesContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); for (TableNameContext each : ctx.tableName()) { result.getValues().add((TableSegment) visit(each)); } return 
result; } @Override public ASTNode visitTableName(final TableNameContext ctx) { LiteralValue tableName = (LiteralValue) visit(ctx.name()); TableSegment result = new TableSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), tableName.getLiteral()); OwnerContext owner = ctx.owner(); if (null != owner) { result.setOwner(createSchemaSegment(owner)); } return result; } @Override public ASTNode visitColumnNames(final ColumnNamesContext ctx) { Collection<ColumnSegment> segments = new LinkedList<>(); for (ColumnNameContext each : ctx.columnName()) { segments.add((ColumnSegment) visit(each)); } InsertColumnsSegment result = new InsertColumnsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.getColumns().addAll(segments); return result; } @Override public ASTNode visitColumnName(final ColumnNameContext ctx) { LiteralValue columnName = (LiteralValue) visit(ctx.name()); ColumnSegment result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnName.getLiteral()); OwnerContext owner = ctx.owner(); if (null != owner) { result.setOwner(createTableSegment(owner)); } return result; } @Override public ASTNode visitIndexName(final IndexNameContext ctx) { LiteralValue indexName = (LiteralValue) visit(ctx.identifier()); return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName.getLiteral()); } @Override public ASTNode visitDataTypeName_(final DataTypeName_Context ctx) { return visit(ctx.identifier(0)); } @Override public ASTNode visitExpr(final ExprContext ctx) { BooleanPrimaryContext bool = ctx.booleanPrimary(); if (null != bool) { return visit(bool); } else if (null != ctx.logicalOperator()) { return mergePredicateSegment(visit(ctx.expr(0)), visit(ctx.expr(1)), ctx.logicalOperator().getText()); } else if (!ctx.expr().isEmpty()) { return visit(ctx.expr(0)); } return createExpressionSegment(new LiteralValue(ctx.getText()), ctx); } @Override public ASTNode visitBooleanPrimary(final 
BooleanPrimaryContext ctx) { if (null != ctx.subquery()) { return new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.subquery().getText()); } if (null != ctx.comparisonOperator()) { return createCompareSegment(ctx); } if (null != ctx.predicate()) { return visit(ctx.predicate()); } return createExpressionSegment(new LiteralValue(ctx.getText()), ctx); } @Override public ASTNode visitPredicate(final PredicateContext ctx) { if (null != ctx.subquery()) { return new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.subquery().getText()); } if (null != ctx.IN()) { return createInSegment(ctx); } if (null != ctx.BETWEEN()) { createBetweenSegment(ctx); } BitExprContext bitExpr = ctx.bitExpr(0); if (null != bitExpr) { return createExpressionSegment(visit(bitExpr), ctx); } return createExpressionSegment(new LiteralValue(ctx.getText()), ctx); } @Override public ASTNode visitBitExpr(final BitExprContext ctx) { SimpleExprContext simple = ctx.simpleExpr(); if (null != simple) { return visit(simple); } return new LiteralValue(ctx.getText()); } @Override public ASTNode visitSimpleExpr(final SimpleExprContext ctx) { if (null != ctx.subquery()) { return new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.subquery().getText()); } if (null != ctx.parameterMarker()) { return visit(ctx.parameterMarker()); } if (null != ctx.literals()) { return visit(ctx.literals()); } if (null != ctx.intervalExpression()) { return visit(ctx.intervalExpression()); } if (null != ctx.functionCall()) { return visit(ctx.functionCall()); } if (null != ctx.columnName()) { return visit(ctx.columnName()); } return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitParameterMarker(final ParameterMarkerContext ctx) { return new ParameterValue(currentParameterIndex++); } @Override public ASTNode visitLiterals(final LiteralsContext ctx) { if (null != ctx.stringLiterals()) 
{ return visit(ctx.stringLiterals()); } if (null != ctx.numberLiterals()) { return visit(ctx.numberLiterals()); } if (null != ctx.booleanLiterals()) { return visit(ctx.booleanLiterals()); } if (null != ctx.nullValueLiterals()) { return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } return new LiteralValue(ctx.getText()); } @Override public ASTNode visitStringLiterals(final StringLiteralsContext ctx) { String text = ctx.getText(); return new LiteralValue(text.substring(1, text.length() - 1)); } @Override public ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) { return new NumberValue(ctx.getText()); } @Override public ASTNode visitBooleanLiterals(final BooleanLiteralsContext ctx) { return new BooleanValue(ctx.getText()); } @Override public ASTNode visitIntervalExpression(final IntervalExpressionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitOrderByClause(final OrderByClauseContext ctx) { Collection<OrderByItemSegment> items = new LinkedList<>(); for (OrderByItemContext each : ctx.orderByItem()) { items.add((OrderByItemSegment) visit(each)); } return new OrderBySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), items); } @Override public ASTNode visitOrderByItem(final OrderByItemContext ctx) { OrderDirection orderDirection = null != ctx.DESC() ? 
OrderDirection.DESC : OrderDirection.ASC; if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); return new ColumnOrderByItemSegment(column, orderDirection); } if (null != ctx.numberLiterals()) { return new IndexOrderByItemSegment(ctx.numberLiterals().getStart().getStartIndex(), ctx.numberLiterals().getStop().getStopIndex(), SQLUtil.getExactlyNumber(ctx.numberLiterals().getText(), 10).intValue(), orderDirection); } return new ExpressionOrderByItemSegment(ctx.expr().getStart().getStartIndex(), ctx.expr().getStop().getStopIndex(), ctx.expr().getText(), orderDirection); } @Override public ASTNode visitFunctionCall(final FunctionCallContext ctx) { if (null != ctx.aggregationFunction()) { return visit(ctx.aggregationFunction()); } if (null != ctx.regularFunction()) { return visit(ctx.regularFunction()); } if (null != ctx.specialFunction()) { return visit(ctx.specialFunction()); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitAggregationFunction(final AggregationFunctionContext ctx) { if (AggregationType.isAggregationType(ctx.aggregationFunctionName_().getText())) { return createAggregationSegment(ctx); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitSpecialFunction(final SpecialFunctionContext ctx) { if (null != ctx.groupConcatFunction()) { return visit(ctx.groupConcatFunction()); } if (null != ctx.windowFunction()) { return visit(ctx.windowFunction()); } if (null != ctx.castFunction()) { return visit(ctx.castFunction()); } if (null != ctx.convertFunction()) { return visit(ctx.convertFunction()); } if (null != ctx.positionFunction()) { return visit(ctx.positionFunction()); } if (null != ctx.substringFunction()) { return visit(ctx.substringFunction()); } if (null != ctx.extractFunction()) { return visit(ctx.extractFunction()); } if (null != 
ctx.charFunction()) { return visit(ctx.charFunction()); } if (null != ctx.weightStringFunction()) { return visit(ctx.weightStringFunction()); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitGroupConcatFunction(final GroupConcatFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitWindowFunction(final WindowFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitCastFunction(final CastFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitConvertFunction(final ConvertFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitPositionFunction(final PositionFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitSubstringFunction(final SubstringFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitExtractFunction(final ExtractFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitCharFunction(final 
CharFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitWeightStringFunction(final WeightStringFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitRegularFunction(final RegularFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWord_Context unreservedWord = ctx.unreservedWord_(); if (null != unreservedWord) { return visit(unreservedWord); } return new LiteralValue(ctx.getText()); } @Override public ASTNode visitUnreservedWord_(final UnreservedWord_Context ctx) { return new LiteralValue(ctx.getText()); } private SchemaSegment createSchemaSegment(final OwnerContext ownerContext) { LiteralValue literalValue = (LiteralValue) visit(ownerContext.identifier()); return new SchemaSegment(ownerContext.getStart().getStartIndex(), ownerContext.getStop().getStopIndex(), literalValue.getLiteral()); } private TableSegment createTableSegment(final OwnerContext ownerContext) { LiteralValue literalValue = (LiteralValue) visit(ownerContext.identifier()); return new TableSegment(ownerContext.getStart().getStartIndex(), ownerContext.getStop().getStopIndex(), literalValue.getLiteral()); } private ASTNode createExpressionSegment(final ASTNode astNode, final ParserRuleContext context) { if (astNode instanceof LiteralValue) { return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((LiteralValue) astNode).getLiteral()); } if (astNode instanceof NumberValue) { return new LiteralExpressionSegment(context.start.getStartIndex(), 
context.stop.getStopIndex(), ((NumberValue) astNode).getNumber()); } if (astNode instanceof ParameterValue) { return new ParameterMarkerExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((ParameterValue) astNode).getParameterIndex()); } return astNode; } private ColumnDefinitionSegment createColumnDefinitionSegment(final ColumnDefinitionContext columnDefinition, final DDLStatement statement) { ColumnSegment column = (ColumnSegment) visit(columnDefinition.columnName()); LiteralValue dataType = (LiteralValue) visit(columnDefinition.dataType().dataTypeName_()); boolean isPrimaryKey = false; for (InlineDataType_Context inlineDataType : columnDefinition.inlineDataType_()) { CommonDataTypeOption_Context commonDataTypeOption = inlineDataType.commonDataTypeOption_(); if (null != commonDataTypeOption) { if (null != commonDataTypeOption.primaryKey()) { isPrimaryKey = true; } if (null != commonDataTypeOption.referenceDefinition_()) { statement.getAllSQLSegments().add((TableSegment) visit(commonDataTypeOption.referenceDefinition_().tableName())); } } } for (GeneratedDataType_Context generatedDataType: columnDefinition.generatedDataType_()) { CommonDataTypeOption_Context commonDataTypeOption = generatedDataType.commonDataTypeOption_(); if (null != commonDataTypeOption) { if (null != commonDataTypeOption.primaryKey()) { isPrimaryKey = true; } if (null != commonDataTypeOption.referenceDefinition_()) { statement.getAllSQLSegments().add((TableSegment) visit(commonDataTypeOption.referenceDefinition_().tableName())); } } } return new ColumnDefinitionSegment(column.getStartIndex(), column.getStopIndex(), column.getName(), dataType.getLiteral(), isPrimaryKey); } private void createColumnPositionSegment(final FirstOrAfterColumnContext firstOrAfterColumn, final ColumnDefinitionSegment columnDefinition, final AlterTableStatement statement) { if (null != firstOrAfterColumn) { ColumnPositionSegment columnPositionSegment = null; if (null != 
firstOrAfterColumn.FIRST()) { columnPositionSegment = new ColumnFirstPositionSegment(columnDefinition.getStartIndex(), columnDefinition.getStopIndex(), columnDefinition.getColumnName()); } else if (null != firstOrAfterColumn.AFTER()) { ColumnSegment afterColumn = (ColumnSegment) visit(firstOrAfterColumn.columnName()); columnPositionSegment = new ColumnAfterPositionSegment(columnDefinition.getStartIndex(), columnDefinition.getStopIndex(), columnDefinition.getColumnName(), afterColumn.getName()); } statement.getChangedPositionColumns().add(columnPositionSegment); statement.getAllSQLSegments().add(columnPositionSegment); } } private Collection<InsertValuesSegment> createInsertValuesSegments(final Collection<AssignmentValuesContext> assignmentValuesContexts) { Collection<InsertValuesSegment> result = new LinkedList<>(); for (AssignmentValuesContext each : assignmentValuesContexts) { result.add((InsertValuesSegment) visit(each)); } return result; } private ASTNode createAggregationSegment(final AggregationFunctionContext ctx) { AggregationType type = AggregationType.valueOf(ctx.aggregationFunctionName_().getText()); int innerExpressionStartIndex = ((TerminalNode) ctx.getChild(1)).getSymbol().getStartIndex(); if (null != ctx.distinct()) { return new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText(), type, innerExpressionStartIndex, getDistinctExpression(ctx)); } return new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText(), type, innerExpressionStartIndex); } private String getDistinctExpression(final AggregationFunctionContext ctx) { StringBuilder result = new StringBuilder(); for (int i = 3; i < ctx.getChildCount() - 1; i++) { result.append(ctx.getChild(i).getText()); } return result.toString(); } private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) { ASTNode leftValue = visit(ctx.booleanPrimary()); ASTNode rightValue = 
visit(ctx.predicate()); if (rightValue instanceof ColumnSegment) { return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (ColumnSegment) leftValue, (ColumnSegment) rightValue); } return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (ColumnSegment) leftValue, new PredicateCompareRightValue(ctx.comparisonOperator().getText(), (ExpressionSegment) rightValue)); } private ASTNode createInSegment(final PredicateContext ctx) { ColumnSegment column = (ColumnSegment) visit(ctx.bitExpr(0)); Collection<ExpressionSegment> segments = Lists.transform(ctx.expr(), new Function<ExprContext, ExpressionSegment>() { @Override public ExpressionSegment apply(final ExprContext input) { return (ExpressionSegment) visit(input); } }); return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, new PredicateInRightValue(segments)); } private void createBetweenSegment(final PredicateContext ctx) { ColumnSegment column = (ColumnSegment) visit(ctx.bitExpr(0)); ExpressionSegment between = (ExpressionSegment) visit(ctx.bitExpr(1)); ExpressionSegment and = (ExpressionSegment) visit(ctx.predicate()); new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, new PredicateBetweenRightValue(between, and)); } private OrPredicateSegment mergePredicateSegment(final ASTNode left, final ASTNode right, final String operator) { Optional<LogicalOperator> logicalOperator = LogicalOperator.valueFrom(operator); Preconditions.checkState(logicalOperator.isPresent()); if (LogicalOperator.OR == logicalOperator.get()) { return mergeOrPredicateSegment(left, right); } return mergeAndPredicateSegment(left, right); } private OrPredicateSegment mergeOrPredicateSegment(final ASTNode left, final ASTNode right) { OrPredicateSegment result = new OrPredicateSegment(); result.getAndPredicates().addAll(getAndPredicates(left)); result.getAndPredicates().addAll(getAndPredicates(right)); 
return result; } private OrPredicateSegment mergeAndPredicateSegment(final ASTNode left, final ASTNode right) { OrPredicateSegment result = new OrPredicateSegment(); for (AndPredicate eachLeft : getAndPredicates(left)) { for (AndPredicate eachRight : getAndPredicates(right)) { result.getAndPredicates().add(createAndPredicate(eachLeft, eachRight)); } } return result; } private AndPredicate createAndPredicate(final AndPredicate left, final AndPredicate right) { AndPredicate result = new AndPredicate(); result.getPredicates().addAll(left.getPredicates()); result.getPredicates().addAll(right.getPredicates()); return result; } private Collection<AndPredicate> getAndPredicates(final ASTNode astNode) { if (astNode instanceof OrPredicateSegment) { return ((OrPredicateSegment) astNode).getAndPredicates(); } if (astNode instanceof AndPredicate) { return Collections.singleton((AndPredicate) astNode); } AndPredicate andPredicate = new AndPredicate(); andPredicate.getPredicates().add((PredicateSegment) astNode); return Collections.singleton(andPredicate); } private boolean isDistinct(final SelectClauseContext ctx) { for (SelectSpecificationContext each : ctx.selectSpecification()) { boolean eachDistinct = ((BooleanValue) visit(each)).isCorrect(); if (eachDistinct) { return true; } } return false; } private void calculateParameterCount(final Collection<ExprContext> exprContexts) { for (ExprContext each : exprContexts) { visit(each); } } }
class MySQLVisitor extends MySQLStatementBaseVisitor<ASTNode> implements SQLVisitor { private int currentParameterIndex; @Override public ASTNode visitUse(final UseContext ctx) { LiteralValue schema = (LiteralValue) visit(ctx.schemaName()); UseStatement result = new UseStatement(); result.setSchema(schema.getLiteral()); return result; } @Override @Override public ASTNode visitShowDatabases(final ShowDatabasesContext ctx) { ShowDatabasesStatement result = new ShowDatabasesStatement(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); result.getAllSQLSegments().add(showLikeSegment); } return result; } @Override public ASTNode visitShowTables(final ShowTablesContext ctx) { ShowTablesStatement result = new ShowTablesStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); result.getAllSQLSegments().add(fromSchemaSegment); } if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); result.getAllSQLSegments().add(showLikeSegment); } return result; } @Override public ASTNode visitShowTableStatus(final ShowTableStatusContext ctx) { ShowTableStatusStatement result = new ShowTableStatusStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); result.getAllSQLSegments().add(fromSchemaSegment); } if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); result.getAllSQLSegments().add(showLikeSegment); } return result; } @Override public ASTNode visitShowColumns(final ShowColumnsContext ctx) { ShowColumnsStatement result = new 
ShowColumnsStatement(); FromTableContext fromTableContext = ctx.fromTable(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); ShowLikeContext showLikeContext = ctx.showLike(); if (null != fromTableContext) { FromTableSegment fromTableSegment = (FromTableSegment) visit(fromTableContext); result.setTable(fromTableSegment.getPattern()); result.getAllSQLSegments().add(fromTableSegment); } if (null != fromSchemaContext) { FromSchemaSegment fromSchemaSegment = (FromSchemaSegment) visit(ctx.fromSchema()); result.getAllSQLSegments().add(fromSchemaSegment); } if (null != showLikeContext) { ShowLikeSegment showLikeSegment = (ShowLikeSegment) visit(ctx.showLike()); result.getAllSQLSegments().add(showLikeSegment); } return result; } @Override public ASTNode visitShowIndex(final ShowIndexContext ctx) { ShowIndexStatement result = new ShowIndexStatement(); FromSchemaContext fromSchemaContext = ctx.fromSchema(); FromTableContext fromTableContext = ctx.fromTable(); if (null != fromSchemaContext) { SchemaNameContext schemaNameContext = fromSchemaContext.schemaName(); LiteralValue schema = (LiteralValue) visit(schemaNameContext); SchemaSegment schemaSegment = new SchemaSegment(schemaNameContext.start.getStartIndex(), schemaNameContext.stop.getStopIndex(), schema.getLiteral()); result.getAllSQLSegments().add(schemaSegment); } if (null != fromTableContext) { FromTableSegment fromTableSegment = (FromTableSegment) visitFromTable(fromTableContext); TableSegment tableSegment = fromTableSegment.getPattern(); result.setTable(tableSegment); result.getAllSQLSegments().add(tableSegment); } return result; } @Override public ASTNode visitShowCreateTable(final ShowCreateTableContext ctx) { ShowCreateTableStatement result = new ShowCreateTableStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); return result; } @Override public ASTNode visitFromTable(final FromTableContext ctx) { FromTableSegment fromTableSegment = new FromTableSegment(); 
TableSegment tableSegment = (TableSegment) visit(ctx.tableName()); fromTableSegment.setPattern(tableSegment); return fromTableSegment; } @Override public ASTNode visitFromSchema(final FromSchemaContext ctx) { return new FromSchemaSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex()); } @Override public ASTNode visitShowLike(final ShowLikeContext ctx) { LiteralValue literalValue = (LiteralValue) visit(ctx.stringLiterals()); return new ShowLikeSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), literalValue.getLiteral()); } @Override public ASTNode visitCreateUser(final CreateUserContext ctx) { return new CreateUserStatement(); } @Override public ASTNode visitDropRole(final DropRoleContext ctx) { return new DropRoleStatement(); } @Override public ASTNode visitSetDefaultRole(final SetDefaultRoleContext ctx) { return new SetRoleStatement(); } @Override public ASTNode visitCreateRole(final CreateRoleContext ctx) { return new CreateRoleStatement(); } @Override public ASTNode visitDropUser(final DropUserContext ctx) { return new DropUserStatement(); } @Override public ASTNode visitAlterUser(final AlterUserContext ctx) { return new AlterUserStatement(); } @Override public ASTNode visitRenameUser(final RenameUserContext ctx) { return new RenameUserStatement(); } @Override public ASTNode visitSetPassword(final SetPasswordContext ctx) { return new SetPasswordStatement(); } @Override public ASTNode visitCreateTable(final CreateTableContext ctx) { CreateTableStatement result = new CreateTableStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); CreateDefinitionClause_Context createDefinitionClause = ctx.createDefinitionClause_(); if (null != createDefinitionClause) { for (CreateDefinition_Context createDefinition : createDefinitionClause.createDefinitions_().createDefinition_()) { ColumnDefinitionContext columnDefinition = createDefinition.columnDefinition(); if 
(null != columnDefinition) { ColumnDefinitionSegment columnDefinitionSegment = createColumnDefinitionSegment(columnDefinition, result); result.getColumnDefinitions().add(columnDefinitionSegment); result.getAllSQLSegments().add(columnDefinitionSegment); } ConstraintDefinition_Context constraintDefinition = createDefinition.constraintDefinition_(); ForeignKeyOption_Context foreignKeyOption = null == constraintDefinition ? null : constraintDefinition.foreignKeyOption_(); if (null != foreignKeyOption) { result.getAllSQLSegments().add((TableSegment) visit(foreignKeyOption.referenceDefinition_().tableName())); } } } CreateLikeClause_Context createLikeClause = ctx.createLikeClause_(); if (null != createLikeClause) { result.getAllSQLSegments().add((TableSegment) visit(createLikeClause)); } return result; } @Override public ASTNode visitAlterTable(final AlterTableContext ctx) { AlterTableStatement result = new AlterTableStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); if (null != ctx.alterDefinitionClause_()) { for (AlterSpecification_Context alterSpecification : ctx.alterDefinitionClause_().alterSpecification_()) { AddColumnSpecificationContext addColumnSpecification = alterSpecification.addColumnSpecification(); if (null != addColumnSpecification) { List<ColumnDefinitionContext> columnDefinitions = addColumnSpecification.columnDefinition(); ColumnDefinitionSegment columnDefinitionSegment = null; for (ColumnDefinitionContext columnDefinition : columnDefinitions) { columnDefinitionSegment = createColumnDefinitionSegment(columnDefinition, result); result.getAddedColumnDefinitions().add(columnDefinitionSegment); result.getAllSQLSegments().add(columnDefinitionSegment); } createColumnPositionSegment(addColumnSpecification.firstOrAfterColumn(), columnDefinitionSegment, result); } AddConstraintSpecificationContext addConstraintSpecification = alterSpecification.addConstraintSpecification(); 
ForeignKeyOption_Context foreignKeyOption = null == addConstraintSpecification ? null : addConstraintSpecification.constraintDefinition_().foreignKeyOption_(); if (null != foreignKeyOption) { result.getAllSQLSegments().add((TableSegment) visit(foreignKeyOption.referenceDefinition_().tableName())); } ChangeColumnSpecificationContext changeColumnSpecification = alterSpecification.changeColumnSpecification(); if (null != changeColumnSpecification) { createColumnPositionSegment(changeColumnSpecification.firstOrAfterColumn(), createColumnDefinitionSegment(changeColumnSpecification.columnDefinition(), result), result); } DropColumnSpecificationContext dropColumnSpecification = alterSpecification.dropColumnSpecification(); if (null != dropColumnSpecification) { result.getDroppedColumnNames().add(((ColumnSegment) visit(dropColumnSpecification)).getName()); } ModifyColumnSpecificationContext modifyColumnSpecification = alterSpecification.modifyColumnSpecification(); if (null != modifyColumnSpecification) { createColumnPositionSegment(modifyColumnSpecification.firstOrAfterColumn(), createColumnDefinitionSegment(modifyColumnSpecification.columnDefinition(), result), result); } } } return result; } @Override public ASTNode visitDropTable(final DropTableContext ctx) { DropTableStatement result = new DropTableStatement(); ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.tableNames()); result.getTables().addAll(tables.getValues()); result.getAllSQLSegments().addAll(tables.getValues()); return result; } @Override public ASTNode visitTruncateTable(final TruncateTableContext ctx) { TruncateStatement result = new TruncateStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.getAllSQLSegments().add(table); result.getTables().add(table); return result; } @Override public ASTNode visitCreateIndex(final CreateIndexContext ctx) { CreateIndexStatement result = new CreateIndexStatement(); TableSegment table = (TableSegment) 
visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); return result; } @Override public ASTNode visitDropIndex(final DropIndexContext ctx) { DropIndexStatement result = new DropIndexStatement(); TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); return result; } @Override public ASTNode visitIndexDefinition_(final IndexDefinition_Context ctx) { return visit(ctx.indexName()); } @Override public ASTNode visitCreateLikeClause_(final CreateLikeClause_Context ctx) { return visit(ctx.tableName()); } @Override public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) { return visit(ctx.columnName()); } @Override public ASTNode visitInsert(final InsertContext ctx) { InsertStatement result; if (null != ctx.insertValuesClause()) { result = (InsertStatement) visit(ctx.insertValuesClause()); } else { result = new InsertStatement(); SetAssignmentSegment segment = (SetAssignmentSegment) visit(ctx.setAssignmentsClause()); result.setSetAssignment(segment); result.getAllSQLSegments().add(segment); } if (null != ctx.onDuplicateKeyClause()) { ListValue<AssignmentSegment> segments = (ListValue<AssignmentSegment>) visit(ctx.onDuplicateKeyClause()); result.getAllSQLSegments().addAll(segments.getValues()); } TableSegment table = (TableSegment) visit(ctx.tableName()); result.setTable(table); result.getAllSQLSegments().add(table); result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitInsertValuesClause(final InsertValuesClauseContext ctx) { InsertStatement result = new InsertStatement(); if (null != ctx.columnNames()) { InsertColumnsSegment insertColumnsSegment = (InsertColumnsSegment) visit(ctx.columnNames()); result.setColumns(insertColumnsSegment); result.getAllSQLSegments().add(insertColumnsSegment); } Collection<InsertValuesSegment> insertValuesSegments = createInsertValuesSegments(ctx.assignmentValues()); 
result.getValues().addAll(insertValuesSegments); result.getAllSQLSegments().addAll(insertValuesSegments); return result; } @Override public ASTNode visitOnDuplicateKeyClause(final OnDuplicateKeyClauseContext ctx) { ListValue<AssignmentSegment> result = new ListValue<>(new LinkedList<AssignmentSegment>()); for (AssignmentContext each : ctx.assignment()) { result.getValues().add((AssignmentSegment) visit(each)); } return result; } @Override public ASTNode visitUpdate(final UpdateContext ctx) { UpdateStatement result = new UpdateStatement(); ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.tableReferences()); SetAssignmentSegment setSegment = (SetAssignmentSegment) visit(ctx.setAssignmentsClause()); result.getTables().addAll(tables.getValues()); result.setSetAssignment(setSegment); result.getAllSQLSegments().addAll(tables.getValues()); result.getAllSQLSegments().add(setSegment); if (null != ctx.whereClause()) { WhereSegment whereSegment = (WhereSegment) visit(ctx.whereClause()); result.setWhere(whereSegment); result.getAllSQLSegments().add(whereSegment); } result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitSetAssignmentsClause(final SetAssignmentsClauseContext ctx) { Collection<AssignmentSegment> assignments = new LinkedList<>(); for (AssignmentContext each : ctx.assignment()) { assignments.add((AssignmentSegment) visit(each)); } return new SetAssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), assignments); } @Override public ASTNode visitAssignmentValues(final AssignmentValuesContext ctx) { List<ExpressionSegment> segments = new LinkedList<>(); for (AssignmentValueContext each : ctx.assignmentValue()) { segments.add((ExpressionSegment) visit(each)); } return new InsertValuesSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), segments); } @Override public ASTNode visitAssignment(final AssignmentContext ctx) { ColumnSegment column = (ColumnSegment) 
visitColumnName(ctx.columnName()); ExpressionSegment value = (ExpressionSegment) visit(ctx.assignmentValue()); return new AssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), column, value); } @Override public ASTNode visitAssignmentValue(final AssignmentValueContext ctx) { ExprContext expr = ctx.expr(); if (null != expr) { return visit(expr); } return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitBlobValue(final BlobValueContext ctx) { return new LiteralValue(ctx.STRING_().getText()); } @Override public ASTNode visitDelete(final DeleteContext ctx) { DeleteStatement result = new DeleteStatement(); if (null != ctx.multipleTablesClause()) { ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.multipleTablesClause()); result.getTables().addAll(tables.getValues()); result.getAllSQLSegments().addAll(tables.getValues()); } else { TableSegment table = (TableSegment) visit(ctx.singleTableClause()); result.getTables().add(table); result.getAllSQLSegments().add(table); } if (null != ctx.whereClause()) { WhereSegment where = (WhereSegment) visit(ctx.whereClause()); result.setWhere(where); result.getAllSQLSegments().add(where); } result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitSingleTableClause(final SingleTableClauseContext ctx) { TableSegment result = (TableSegment) visit(ctx.tableName()); if (null != ctx.alias()) { result.setAlias(ctx.alias().getText()); } return result; } @Override public ASTNode visitMultipleTablesClause(final MultipleTablesClauseContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); result.combine((ListValue<TableSegment>) visit(ctx.multipleTableNames())); result.combine((ListValue<TableSegment>) visit(ctx.tableReferences())); return result; } @Override public ASTNode visitMultipleTableNames(final MultipleTableNamesContext ctx) { 
ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); for (TableNameContext each : ctx.tableName()) { result.getValues().add((TableSegment) visit(each)); } return result; } @Override public ASTNode visitSelect(final SelectContext ctx) { SelectStatement result = (SelectStatement) visit(ctx.unionClause()); result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitUnionClause(final UnionClauseContext ctx) { return visit(ctx.selectClause(0)); } @Override public ASTNode visitSelectClause(final SelectClauseContext ctx) { SelectStatement result = new SelectStatement(); ProjectionsSegment projections = (ProjectionsSegment) visit(ctx.projections()); result.setProjections(projections); result.getAllSQLSegments().add(projections); if (null != ctx.selectSpecification()) { result.getProjections().setDistinctRow(isDistinct(ctx)); } if (null != ctx.fromClause()) { ListValue<TableSegment> tables = (ListValue<TableSegment>) visit(ctx.fromClause()); result.getTables().addAll(tables.getValues()); result.getAllSQLSegments().addAll(tables.getValues()); } if (null != ctx.whereClause()) { WhereSegment where = (WhereSegment) visit(ctx.whereClause()); result.setWhere(where); result.getAllSQLSegments().add(where); } if (null != ctx.orderByClause()) { OrderBySegment orderBy = (OrderBySegment) visit(ctx.orderByClause()); result.setOrderBy(orderBy); result.getAllSQLSegments().add(orderBy); } return result; } @Override public ASTNode visitSelectSpecification(final SelectSpecificationContext ctx) { if (null != ctx.duplicateSpecification()) { return visit(ctx.duplicateSpecification()); } return new BooleanValue(false); } @Override public ASTNode visitDuplicateSpecification(final DuplicateSpecificationContext ctx) { String text = ctx.getText(); if ("DISTINCT".equalsIgnoreCase(text) || "DISTINCTROW".equalsIgnoreCase(text)) { return new BooleanValue(true); } return new BooleanValue(false); } @Override public ASTNode 
visitProjections(final ProjectionsContext ctx) { Collection<ProjectionSegment> projections = new LinkedList<>(); if (null != ctx.unqualifiedShorthand()) { projections.add( new ShorthandProjectionSegment(ctx.unqualifiedShorthand().getStart().getStartIndex(), ctx.unqualifiedShorthand().getStop().getStopIndex(), ctx.unqualifiedShorthand().getText())); } for (ProjectionContext each : ctx.projection()) { projections.add((ProjectionSegment) visit(each)); } ProjectionsSegment result = new ProjectionsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.getProjections().addAll(projections); return result; } @Override public ASTNode visitProjection(final ProjectionContext ctx) { if (null != ctx.qualifiedShorthand()) { QualifiedShorthandContext shorthand = ctx.qualifiedShorthand(); ShorthandProjectionSegment result = new ShorthandProjectionSegment(shorthand.getStart().getStartIndex(), shorthand.getStop().getStopIndex(), shorthand.getText()); result.setOwner(new TableSegment(shorthand.identifier().getStart().getStartIndex(), shorthand.identifier().getStop().getStopIndex(), shorthand.identifier().getText())); return result; } String alias = null == ctx.alias() ? null : ctx.alias().getText(); if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); ColumnProjectionSegment result = new ColumnProjectionSegment(ctx.columnName().getText(), column); result.setAlias(alias); return result; } LiteralExpressionSegment column = (LiteralExpressionSegment) visit(ctx.expr()); ExpressionProjectionSegment result = Strings.isNullOrEmpty(alias) ? 
new ExpressionProjectionSegment(column.getStartIndex(), column.getStopIndex(), String.valueOf(column.getLiterals())) : new ExpressionProjectionSegment(column.getStartIndex(), ctx.alias().stop.getStopIndex(), String.valueOf(column.getLiterals())); result.setAlias(alias); return result; } @Override public ASTNode visitFromClause(final FromClauseContext ctx) { return visit(ctx.tableReferences()); } @Override public ASTNode visitTableReferences(final TableReferencesContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); for (EscapedTableReferenceContext each : ctx.escapedTableReference()) { result.combine((ListValue<TableSegment>) visit(each)); } return result; } @Override public ASTNode visitEscapedTableReference(final EscapedTableReferenceContext ctx) { return visit(ctx.tableReference()); } @Override public ASTNode visitTableReference(final TableReferenceContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); if (null != ctx.joinedTable()) { for (JoinedTableContext each : ctx.joinedTable()) { result.getValues().add((TableSegment) visit(each)); } } if (null != ctx.tableFactor()) { result.getValues().add((TableSegment) visit(ctx.tableFactor())); } return result; } @Override public ASTNode visitTableFactor(final TableFactorContext ctx) { if (null != ctx.tableReferences()) { return visit(ctx.tableReferences()); } TableSegment table = (TableSegment) visit(ctx.tableName()); if (null != ctx.alias()) { table.setAlias(ctx.alias().getText()); } return table; } @Override public ASTNode visitJoinedTable(final JoinedTableContext ctx) { return visit(ctx.tableFactor()); } @Override public ASTNode visitWhereClause(final WhereClauseContext ctx) { WhereSegment result = new WhereSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.setParameterMarkerStartIndex(currentParameterIndex); ASTNode segment = visit(ctx.expr()); if (segment instanceof OrPredicateSegment) { 
result.getAndPredicates().addAll(((OrPredicateSegment) segment).getAndPredicates()); } else if (segment instanceof PredicateSegment) { AndPredicate andPredicate = new AndPredicate(); andPredicate.getPredicates().add((PredicateSegment) segment); result.getAndPredicates().add(andPredicate); } result.setParametersCount(currentParameterIndex); return result; } @Override public ASTNode visitSetTransaction(final SetTransactionContext ctx) { return new SetTransactionStatement(); } @Override public ASTNode visitSetAutoCommit(final SetAutoCommitContext ctx) { SetAutoCommitStatement result = new SetAutoCommitStatement(); AutoCommitValueContext autoCommitValueContext = ctx.autoCommitValue(); if (null != autoCommitValueContext) { AutoCommitSegment autoCommitSegment = (AutoCommitSegment) visit(ctx.autoCommitValue()); result.getAllSQLSegments().add(autoCommitSegment); result.setAutoCommit(autoCommitSegment.isAutoCommit()); } return result; } @Override public ASTNode visitAutoCommitValue(final AutoCommitValueContext ctx) { boolean autoCommit = "1".equals(ctx.getText()) || "ON".equals(ctx.getText()); return new AutoCommitSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), autoCommit); } @Override public ASTNode visitBeginTransaction(final BeginTransactionContext ctx) { return new BeginTransactionStatement(); } @Override public ASTNode visitCommit(final CommitContext ctx) { return new CommitStatement(); } @Override public ASTNode visitRollback(final RollbackContext ctx) { return new RollbackStatement(); } @Override public ASTNode visitSavepoint(final SavepointContext ctx) { return new SavepointStatement(); } @Override public ASTNode visitSchemaName(final SchemaNameContext ctx) { return visit(ctx.identifier()); } @Override public ASTNode visitTableNames(final TableNamesContext ctx) { ListValue<TableSegment> result = new ListValue<>(new LinkedList<TableSegment>()); for (TableNameContext each : ctx.tableName()) { result.getValues().add((TableSegment) visit(each)); } return 
result; } @Override public ASTNode visitTableName(final TableNameContext ctx) { LiteralValue tableName = (LiteralValue) visit(ctx.name()); TableSegment result = new TableSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), tableName.getLiteral()); OwnerContext owner = ctx.owner(); if (null != owner) { result.setOwner(createSchemaSegment(owner)); } return result; } @Override public ASTNode visitColumnNames(final ColumnNamesContext ctx) { Collection<ColumnSegment> segments = new LinkedList<>(); for (ColumnNameContext each : ctx.columnName()) { segments.add((ColumnSegment) visit(each)); } InsertColumnsSegment result = new InsertColumnsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.getColumns().addAll(segments); return result; } @Override public ASTNode visitColumnName(final ColumnNameContext ctx) { LiteralValue columnName = (LiteralValue) visit(ctx.name()); ColumnSegment result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnName.getLiteral()); OwnerContext owner = ctx.owner(); if (null != owner) { result.setOwner(createTableSegment(owner)); } return result; } @Override public ASTNode visitIndexName(final IndexNameContext ctx) { LiteralValue indexName = (LiteralValue) visit(ctx.identifier()); return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName.getLiteral()); } @Override public ASTNode visitDataTypeName_(final DataTypeName_Context ctx) { return visit(ctx.identifier(0)); } @Override public ASTNode visitExpr(final ExprContext ctx) { BooleanPrimaryContext bool = ctx.booleanPrimary(); if (null != bool) { return visit(bool); } else if (null != ctx.logicalOperator()) { return mergePredicateSegment(visit(ctx.expr(0)), visit(ctx.expr(1)), ctx.logicalOperator().getText()); } else if (!ctx.expr().isEmpty()) { return visit(ctx.expr(0)); } return createExpressionSegment(new LiteralValue(ctx.getText()), ctx); } @Override public ASTNode visitBooleanPrimary(final 
BooleanPrimaryContext ctx) { if (null != ctx.subquery()) { return new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.subquery().getText()); } if (null != ctx.comparisonOperator()) { return createCompareSegment(ctx); } if (null != ctx.predicate()) { return visit(ctx.predicate()); } return createExpressionSegment(new LiteralValue(ctx.getText()), ctx); } @Override public ASTNode visitPredicate(final PredicateContext ctx) { if (null != ctx.subquery()) { return new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.subquery().getText()); } if (null != ctx.IN()) { return createInSegment(ctx); } if (null != ctx.BETWEEN()) { return createBetweenSegment(ctx); } BitExprContext bitExpr = ctx.bitExpr(0); if (null != bitExpr) { return createExpressionSegment(visit(bitExpr), ctx); } return createExpressionSegment(new LiteralValue(ctx.getText()), ctx); } @Override public ASTNode visitBitExpr(final BitExprContext ctx) { SimpleExprContext simple = ctx.simpleExpr(); if (null != simple) { return visit(simple); } return new LiteralValue(ctx.getText()); } @Override public ASTNode visitSimpleExpr(final SimpleExprContext ctx) { if (null != ctx.subquery()) { return new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.subquery().getText()); } if (null != ctx.parameterMarker()) { return visit(ctx.parameterMarker()); } if (null != ctx.literals()) { return visit(ctx.literals()); } if (null != ctx.intervalExpression()) { return visit(ctx.intervalExpression()); } if (null != ctx.functionCall()) { return visit(ctx.functionCall()); } if (null != ctx.columnName()) { return visit(ctx.columnName()); } return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitParameterMarker(final ParameterMarkerContext ctx) { return new ParameterMarkerValue(currentParameterIndex++); } @Override public ASTNode visitLiterals(final LiteralsContext ctx) { if (null != 
ctx.stringLiterals()) { return visit(ctx.stringLiterals()); } if (null != ctx.numberLiterals()) { return visit(ctx.numberLiterals()); } if (null != ctx.booleanLiterals()) { return visit(ctx.booleanLiterals()); } if (null != ctx.nullValueLiterals()) { return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } return new LiteralValue(ctx.getText()); } @Override public ASTNode visitStringLiterals(final StringLiteralsContext ctx) { String text = ctx.getText(); return new LiteralValue(text.substring(1, text.length() - 1)); } @Override public ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) { return new NumberValue(ctx.getText()); } @Override public ASTNode visitBooleanLiterals(final BooleanLiteralsContext ctx) { return new BooleanValue(ctx.getText()); } @Override public ASTNode visitIntervalExpression(final IntervalExpressionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitOrderByClause(final OrderByClauseContext ctx) { Collection<OrderByItemSegment> items = new LinkedList<>(); for (OrderByItemContext each : ctx.orderByItem()) { items.add((OrderByItemSegment) visit(each)); } return new OrderBySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), items); } @Override public ASTNode visitOrderByItem(final OrderByItemContext ctx) { OrderDirection orderDirection = null != ctx.DESC() ? 
OrderDirection.DESC : OrderDirection.ASC; if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); return new ColumnOrderByItemSegment(column, orderDirection); } if (null != ctx.numberLiterals()) { return new IndexOrderByItemSegment(ctx.numberLiterals().getStart().getStartIndex(), ctx.numberLiterals().getStop().getStopIndex(), SQLUtil.getExactlyNumber(ctx.numberLiterals().getText(), 10).intValue(), orderDirection); } return new ExpressionOrderByItemSegment(ctx.expr().getStart().getStartIndex(), ctx.expr().getStop().getStopIndex(), ctx.expr().getText(), orderDirection); } @Override public ASTNode visitFunctionCall(final FunctionCallContext ctx) { if (null != ctx.aggregationFunction()) { return visit(ctx.aggregationFunction()); } if (null != ctx.regularFunction()) { return visit(ctx.regularFunction()); } if (null != ctx.specialFunction()) { return visit(ctx.specialFunction()); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitAggregationFunction(final AggregationFunctionContext ctx) { if (AggregationType.isAggregationType(ctx.aggregationFunctionName_().getText())) { return createAggregationSegment(ctx); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitSpecialFunction(final SpecialFunctionContext ctx) { if (null != ctx.groupConcatFunction()) { return visit(ctx.groupConcatFunction()); } if (null != ctx.windowFunction()) { return visit(ctx.windowFunction()); } if (null != ctx.castFunction()) { return visit(ctx.castFunction()); } if (null != ctx.convertFunction()) { return visit(ctx.convertFunction()); } if (null != ctx.positionFunction()) { return visit(ctx.positionFunction()); } if (null != ctx.substringFunction()) { return visit(ctx.substringFunction()); } if (null != ctx.extractFunction()) { return visit(ctx.extractFunction()); } if (null != 
ctx.charFunction()) { return visit(ctx.charFunction()); } if (null != ctx.weightStringFunction()) { return visit(ctx.weightStringFunction()); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitGroupConcatFunction(final GroupConcatFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitWindowFunction(final WindowFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitCastFunction(final CastFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitConvertFunction(final ConvertFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitPositionFunction(final PositionFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitSubstringFunction(final SubstringFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitExtractFunction(final ExtractFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitCharFunction(final 
CharFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitWeightStringFunction(final WeightStringFunctionContext ctx) { calculateParameterCount(Collections.singleton(ctx.expr())); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitRegularFunction(final RegularFunctionContext ctx) { calculateParameterCount(ctx.expr()); return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWord_Context unreservedWord = ctx.unreservedWord_(); if (null != unreservedWord) { return visit(unreservedWord); } return new LiteralValue(ctx.getText()); } @Override public ASTNode visitUnreservedWord_(final UnreservedWord_Context ctx) { return new LiteralValue(ctx.getText()); } private SchemaSegment createSchemaSegment(final OwnerContext ownerContext) { LiteralValue literalValue = (LiteralValue) visit(ownerContext.identifier()); return new SchemaSegment(ownerContext.getStart().getStartIndex(), ownerContext.getStop().getStopIndex(), literalValue.getLiteral()); } private TableSegment createTableSegment(final OwnerContext ownerContext) { LiteralValue literalValue = (LiteralValue) visit(ownerContext.identifier()); return new TableSegment(ownerContext.getStart().getStartIndex(), ownerContext.getStop().getStopIndex(), literalValue.getLiteral()); } private ASTNode createExpressionSegment(final ASTNode astNode, final ParserRuleContext context) { if (astNode instanceof LiteralValue) { return new LiteralExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((LiteralValue) astNode).getLiteral()); } if (astNode instanceof NumberValue) { return new LiteralExpressionSegment(context.start.getStartIndex(), 
context.stop.getStopIndex(), ((NumberValue) astNode).getNumber()); } if (astNode instanceof ParameterMarkerValue) { return new ParameterMarkerExpressionSegment(context.start.getStartIndex(), context.stop.getStopIndex(), ((ParameterMarkerValue) astNode).getParameterIndex()); } return astNode; } private ColumnDefinitionSegment createColumnDefinitionSegment(final ColumnDefinitionContext columnDefinition, final DDLStatement statement) { ColumnSegment column = (ColumnSegment) visit(columnDefinition.columnName()); LiteralValue dataType = (LiteralValue) visit(columnDefinition.dataType().dataTypeName_()); boolean isPrimaryKey = false; for (InlineDataType_Context inlineDataType : columnDefinition.inlineDataType_()) { CommonDataTypeOption_Context commonDataTypeOption = inlineDataType.commonDataTypeOption_(); if (null != commonDataTypeOption) { if (null != commonDataTypeOption.primaryKey()) { isPrimaryKey = true; } if (null != commonDataTypeOption.referenceDefinition_()) { statement.getAllSQLSegments().add((TableSegment) visit(commonDataTypeOption.referenceDefinition_().tableName())); } } } for (GeneratedDataType_Context generatedDataType: columnDefinition.generatedDataType_()) { CommonDataTypeOption_Context commonDataTypeOption = generatedDataType.commonDataTypeOption_(); if (null != commonDataTypeOption) { if (null != commonDataTypeOption.primaryKey()) { isPrimaryKey = true; } if (null != commonDataTypeOption.referenceDefinition_()) { statement.getAllSQLSegments().add((TableSegment) visit(commonDataTypeOption.referenceDefinition_().tableName())); } } } return new ColumnDefinitionSegment(column.getStartIndex(), column.getStopIndex(), column.getName(), dataType.getLiteral(), isPrimaryKey); } private void createColumnPositionSegment(final FirstOrAfterColumnContext firstOrAfterColumn, final ColumnDefinitionSegment columnDefinition, final AlterTableStatement statement) { if (null != firstOrAfterColumn) { ColumnPositionSegment columnPositionSegment = null; if (null != 
firstOrAfterColumn.FIRST()) { columnPositionSegment = new ColumnFirstPositionSegment(columnDefinition.getStartIndex(), columnDefinition.getStopIndex(), columnDefinition.getColumnName()); } else if (null != firstOrAfterColumn.AFTER()) { ColumnSegment afterColumn = (ColumnSegment) visit(firstOrAfterColumn.columnName()); columnPositionSegment = new ColumnAfterPositionSegment(columnDefinition.getStartIndex(), columnDefinition.getStopIndex(), columnDefinition.getColumnName(), afterColumn.getName()); } statement.getChangedPositionColumns().add(columnPositionSegment); statement.getAllSQLSegments().add(columnPositionSegment); } } private Collection<InsertValuesSegment> createInsertValuesSegments(final Collection<AssignmentValuesContext> assignmentValuesContexts) { Collection<InsertValuesSegment> result = new LinkedList<>(); for (AssignmentValuesContext each : assignmentValuesContexts) { result.add((InsertValuesSegment) visit(each)); } return result; } private ASTNode createAggregationSegment(final AggregationFunctionContext ctx) { AggregationType type = AggregationType.valueOf(ctx.aggregationFunctionName_().getText()); int innerExpressionStartIndex = ((TerminalNode) ctx.getChild(1)).getSymbol().getStartIndex(); if (null != ctx.distinct()) { return new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText(), type, innerExpressionStartIndex, getDistinctExpression(ctx)); } return new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText(), type, innerExpressionStartIndex); } private String getDistinctExpression(final AggregationFunctionContext ctx) { StringBuilder result = new StringBuilder(); for (int i = 3; i < ctx.getChildCount() - 1; i++) { result.append(ctx.getChild(i).getText()); } return result.toString(); } private PredicateSegment createCompareSegment(final BooleanPrimaryContext ctx) { ASTNode leftValue = visit(ctx.booleanPrimary()); ASTNode rightValue = 
visit(ctx.predicate()); if (rightValue instanceof ColumnSegment) { return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (ColumnSegment) leftValue, (ColumnSegment) rightValue); } return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (ColumnSegment) leftValue, new PredicateCompareRightValue(ctx.comparisonOperator().getText(), (ExpressionSegment) rightValue)); } private PredicateSegment createInSegment(final PredicateContext ctx) { ColumnSegment column = (ColumnSegment) visit(ctx.bitExpr(0)); Collection<ExpressionSegment> segments = Lists.transform(ctx.expr(), new Function<ExprContext, ExpressionSegment>() { @Override public ExpressionSegment apply(final ExprContext input) { return (ExpressionSegment) visit(input); } }); return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, new PredicateInRightValue(segments)); } private PredicateSegment createBetweenSegment(final PredicateContext ctx) { ColumnSegment column = (ColumnSegment) visit(ctx.bitExpr(0)); ExpressionSegment between = (ExpressionSegment) visit(ctx.bitExpr(1)); ExpressionSegment and = (ExpressionSegment) visit(ctx.predicate()); return new PredicateSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, new PredicateBetweenRightValue(between, and)); } private OrPredicateSegment mergePredicateSegment(final ASTNode left, final ASTNode right, final String operator) { Optional<LogicalOperator> logicalOperator = LogicalOperator.valueFrom(operator); Preconditions.checkState(logicalOperator.isPresent()); if (LogicalOperator.OR == logicalOperator.get()) { return mergeOrPredicateSegment(left, right); } return mergeAndPredicateSegment(left, right); } private OrPredicateSegment mergeOrPredicateSegment(final ASTNode left, final ASTNode right) { OrPredicateSegment result = new OrPredicateSegment(); result.getAndPredicates().addAll(getAndPredicates(left)); 
result.getAndPredicates().addAll(getAndPredicates(right)); return result; } private OrPredicateSegment mergeAndPredicateSegment(final ASTNode left, final ASTNode right) { OrPredicateSegment result = new OrPredicateSegment(); for (AndPredicate eachLeft : getAndPredicates(left)) { for (AndPredicate eachRight : getAndPredicates(right)) { result.getAndPredicates().add(createAndPredicate(eachLeft, eachRight)); } } return result; } private AndPredicate createAndPredicate(final AndPredicate left, final AndPredicate right) { AndPredicate result = new AndPredicate(); result.getPredicates().addAll(left.getPredicates()); result.getPredicates().addAll(right.getPredicates()); return result; } private Collection<AndPredicate> getAndPredicates(final ASTNode astNode) { if (astNode instanceof OrPredicateSegment) { return ((OrPredicateSegment) astNode).getAndPredicates(); } if (astNode instanceof AndPredicate) { return Collections.singleton((AndPredicate) astNode); } AndPredicate andPredicate = new AndPredicate(); andPredicate.getPredicates().add((PredicateSegment) astNode); return Collections.singleton(andPredicate); } private boolean isDistinct(final SelectClauseContext ctx) { for (SelectSpecificationContext each : ctx.selectSpecification()) { boolean eachDistinct = ((BooleanValue) visit(each)).isCorrect(); if (eachDistinct) { return true; } } return false; } private void calculateParameterCount(final Collection<ExprContext> exprContexts) { for (ExprContext each : exprContexts) { visit(each); } } }
Maybe an enum would be better than a boolean here.
/**
 * Builds a {@link SortItems} node from a single ORDER BY item in the parse tree.
 *
 * <p>The direction is descending only when the {@code DESC} keyword is present;
 * otherwise it defaults to ascending. The visited expression is wrapped in an
 * {@link UnboundAlias} when it is not already a {@link NamedExpression}, so the
 * sort item always carries a named expression.
 *
 * @param ctx parse-tree context for one sort item
 * @return the sort item pairing the named expression with its direction
 */
public SortItems genSortItems(SortItemContext ctx) {
    // true = ascending, false = descending; DESC is the only token that flips it.
    // (Original code initialized to true and then dead-assigned true again in the
    // else branch — collapsed to a single expression.)
    // NOTE(review): an ASC/DESC enum would be clearer than a boolean — TODO consider.
    boolean orderDirection = ctx.DESC() == null;
    Expression expression = typedVisit(ctx.expression());
    // Preserve an existing name if the expression already carries one.
    NamedExpression namedExpression;
    if (expression instanceof NamedExpression) {
        namedExpression = (NamedExpression) expression;
    } else {
        namedExpression = new UnboundAlias(expression);
    }
    return new SortItems(namedExpression, orderDirection);
}
orderDirection = false;
/**
 * Translates one ORDER BY item from the parse tree into a {@link SortItems} node.
 *
 * <p>A present {@code DESC} keyword yields a descending direction; its absence
 * yields ascending (the default).
 *
 * @param ctx parse-tree context for the sort item
 * @return the sort item pairing the visited expression with its direction
 */
public SortItems genSortItems(SortItemContext ctx) {
    // DESC token present -> descending; otherwise ascending.
    OrderDirection orderDirection = ctx.DESC() != null ? OrderDirection.DESC : OrderDirection.ASC;
    Expression expression = typedVisit(ctx.expression());
    return new SortItems(expression, orderDirection);
}
class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> { /** * Create a logical plan using a where clause. */ private final BiFunction<WhereClauseContext, LogicalPlan, LogicalPlan> withWhereClause = (WhereClauseContext ctx, LogicalPlan plan) -> new LogicalUnaryPlan(new LogicalFilter(expression((ctx.booleanExpression()))), plan); protected <T> T typedVisit(ParseTree ctx) { return (T) ctx.accept(this); } /** * Override the default behavior for all visit methods. This will only return a non-null result * when the context has only one child. This is done because there is no generic method to * combine the results of the context children. In all other cases null is returned. */ @Override public Object visitChildren(RuleNode node) { if (node.getChildCount() == 1) { return node.getChild(0).accept(this); } else { return null; } } @Override public LogicalPlan visitSingleStatement(SingleStatementContext ctx) { Supplier<LogicalPlan> f = () -> (LogicalPlan) visit(ctx.statement()); return ParserUtils.withOrigin(ctx, f); } /* ******************************************************************************************** * Plan parsing * ******************************************************************************************** */ private LogicalPlan plan(ParserRuleContext tree) { return (LogicalPlan) tree.accept(this); } @Override public LogicalPlan visitQuery(QueryContext ctx) { Supplier<LogicalPlan> f = () -> { LogicalPlan query = plan(ctx.queryTerm()); LogicalPlan queryOrganization = withQueryOrganization(ctx.queryOrganization(), query); return queryOrganization; }; return ParserUtils.withOrigin(ctx, f); } private LogicalPlan withQueryOrganization(QueryOrganizationContext ctx, LogicalPlan children) { List<SortItems> sortItems = visitQueryOrganization(ctx); return sortItems == null ? 
children : new LogicalUnaryPlan(new LogicalSort(sortItems), children); } @Override public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationContext ctx) { Supplier<LogicalPlan> f = () -> { LogicalPlan from = visitFromClause(ctx.fromClause()); return withSelectQuerySpecification( ctx, ctx.selectClause(), ctx.whereClause(), from, ctx.aggClause()); }; return ParserUtils.withOrigin(ctx, f); } @Override public Expression visitExpression(ExpressionContext ctx) { Supplier<Expression> f = () -> (Expression) visit(ctx.booleanExpression()); return ParserUtils.withOrigin(ctx, f); } @Override public List<Expression> visitNamedExpressionSeq(NamedExpressionSeqContext ctx) { List<Expression> expressions = Lists.newArrayList(); if (ctx != null) { for (NamedExpressionContext namedExpressionContext : ctx.namedExpression()) { Expression namedExpression = typedVisit(namedExpressionContext); expressions.add(namedExpression); } } return expressions; } /** * Add a regular (SELECT) query specification to a logical plan. The query specification * is the core of the logical plan, this is where sourcing (FROM clause), projection (SELECT), * aggregation (GROUP BY ... HAVING ...) and filtering (WHERE) takes place. * * <p>Note that query hints are ignored (both by the parser and the builder). 
*/ private LogicalPlan withSelectQuerySpecification( ParserRuleContext ctx, SelectClauseContext selectClause, WhereClauseContext whereClause, LogicalPlan relation, AggClauseContext aggClause) { Supplier<LogicalPlan> f = () -> { LogicalPlan plan = visitCommonSelectQueryClausePlan( relation, visitNamedExpressionSeq(selectClause.namedExpressionSeq()), whereClause, aggClause); return plan; }; return ParserUtils.withOrigin(ctx, f); } private LogicalPlan visitCommonSelectQueryClausePlan( LogicalPlan relation, List<Expression> expressions, WhereClauseContext whereClause, AggClauseContext aggClause) { LogicalPlan withFilter = relation.optionalMap(whereClause, withWhereClause); List<NamedExpression> namedExpressions = expressions.stream().map(expression -> { if (expression instanceof NamedExpression) { return (NamedExpression) expression; } else { return new UnboundAlias(expression); } }).collect(Collectors.toList()); LogicalPlan withProject; if (CollectionUtils.isNotEmpty(namedExpressions)) { withProject = new LogicalUnaryPlan(new LogicalProject(namedExpressions), withFilter); } else { withProject = withFilter; } LogicalPlan withAgg; if (aggClause != null) { withAgg = withAggClause(expressions, aggClause.groupByItem(), withProject); } else { withAgg = withProject; } return withAgg; } @Override public LogicalPlan visitFromClause(FromClauseContext ctx) { LogicalPlan left = null; for (RelationContext relation : ctx.relation()) { LogicalPlan right = plan(relation.relationPrimary()); if (left == null) { left = right; } else { left = new LogicalBinaryPlan( new LogicalJoin(JoinType.INNER_JOIN, Optional.empty()), left, right); } left = withJoinRelations(left, relation); } return left; } /** * Join one more [[LogicalPlan]]s to the current logical plan. 
*/ private LogicalPlan withJoinRelations(LogicalPlan base, RelationContext ctx) { LogicalPlan last = base; for (JoinRelationContext join : ctx.joinRelation()) { JoinType joinType; if (join.joinType().LEFT() != null) { joinType = JoinType.LEFT_OUTER_JOIN; } else if (join.joinType().RIGHT() != null) { joinType = JoinType.RIGHT_OUTER_JOIN; } else if (join.joinType().FULL() != null) { joinType = JoinType.FULL_OUTER_JOIN; } else if (join.joinType().SEMI() != null) { joinType = JoinType.LEFT_SEMI_JOIN; } else if (join.joinType().ANTI() != null) { joinType = JoinType.LEFT_ANTI_JOIN; } else if (join.joinType().CROSS() != null) { joinType = JoinType.CROSS_JOIN; } else { joinType = JoinType.INNER_JOIN; } JoinCriteriaContext joinCriteria = join.joinCriteria(); Expression condition; if (joinCriteria == null) { condition = null; } else { condition = expression(joinCriteria.booleanExpression()); } last = new LogicalBinaryPlan( new LogicalJoin(joinType, Optional.ofNullable(condition)), last, plan(join.relationPrimary()) ); } return last; } private LogicalPlan withAggClause(List<Expression> expressions, GroupByItemContext ctx, LogicalPlan aggClause) { List<Expression> tmpExpressions = new ArrayList<>(); for (ExpressionContext expressionCtx : ctx.expression()) { tmpExpressions.add(typedVisit(expressionCtx)); } List<NamedExpression> groupByExpressions = tmpExpressions.stream().map(expression -> { if (expression instanceof NamedExpression) { return (NamedExpression) expression; } else { return new UnboundAlias(expression); } }).collect(Collectors.toList()); List<Expression> aggExpressions = new ArrayList<>(); for (Expression expression : expressions) { if (expression instanceof FunctionCall) { aggExpressions.add(expression); } } return new LogicalUnaryPlan(new LogicalAggregation(groupByExpressions, aggExpressions), aggClause); } /** * Generate sortItems. * * @param ctx SortItemContext * @return SortItems */ /** * Create SortItems list. 
* * @param ctx QueryOrganizationContext * @return List of SortItems */ public List<SortItems> visitQueryOrganization(QueryOrganizationContext ctx) { List<SortItems> sortItems = new ArrayList<>(); if (ctx.sortClause().ORDER() != null) { for (SortItemContext sortItemContext : ctx.sortClause().sortItem()) { sortItems.add(genSortItems(sortItemContext)); } return new ArrayList<>(sortItems); } else { return null; } } /** * Create an aliased table reference. This is typically used in FROM clauses. */ @Override public LogicalPlan visitTableName(TableNameContext ctx) { List<String> tableId = visitMultipartIdentifier(ctx.multipartIdentifier()); UnboundRelation relation = new UnboundRelation(tableId); return new LogicalLeafPlan(relation); } /** * Create a Sequence of Strings for a parenthesis enclosed alias list. */ @Override public List<String> visitIdentifierList(IdentifierListContext ctx) { return visitIdentifierSeq(ctx.identifierSeq()); } /** * Create a Sequence of Strings for an identifier list. */ @Override public List<String> visitIdentifierSeq(IdentifierSeqContext ctx) { return ctx.ident.stream().map(RuleContext::getText).collect(Collectors.toList()); } /* ******************************************************************************************** * Table Identifier parsing * ******************************************************************************************** */ @Override public List<String> visitMultipartIdentifier(MultipartIdentifierContext ctx) { return ctx.parts.stream().map(RuleContext::getText).collect(Collectors.toList()); } /* ******************************************************************************************** * Expression parsing * ******************************************************************************************** */ /** * Create an expression from the given context. This method just passes the context on to the * visitor and only takes care of typing (We assume that the visitor returns an Expression here). 
*/ private Expression expression(ParserRuleContext ctx) { return typedVisit(ctx); } /** * Create a star (i.e. all) expression; this selects all elements (in the specified object). * Both un-targeted (global) and targeted aliases are supported. */ @Override public Expression visitStar(StarContext ctx) { Supplier<Expression> f = () -> { final QualifiedNameContext qualifiedNameContext = ctx.qualifiedName(); List<String> target; if (qualifiedNameContext != null) { target = qualifiedNameContext.identifier().stream() .map(RuleContext::getText).collect(Collectors.toList()); } else { target = Lists.newArrayList(); } return new UnboundStar(target); }; return ParserUtils.withOrigin(ctx, f); } /** * Create an aliased expression if an alias is specified. Both single and multi-aliases are * supported. */ @Override public Expression visitNamedExpression(NamedExpressionContext ctx) { final Expression expression = expression(ctx.expression()); if (ctx.name != null) { return new Alias(expression, ctx.name.getText()); } else { return expression; } } /** * Create a comparison expression. This compares two expressions. 
The following comparison * operators are supported: * - Equal: '=' or '==' * - Null-safe Equal: '<=>' * - Not Equal: '<>' or '!=' * - Less than: '<' * - Less then or Equal: '<=' * - Greater than: '>' * - Greater then or Equal: '>=' */ @Override public Expression visitComparison(ComparisonContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); TerminalNode operator = (TerminalNode) ctx.comparisonOperator().getChild(0); switch (operator.getSymbol().getType()) { case DorisParser.EQ: return new EqualTo(left, right); case DorisParser.NEQ: return new Not(new EqualTo(left, right)); case DorisParser.LT: return new LessThan(left, right); case DorisParser.GT: return new GreaterThan(left, right); case DorisParser.LTE: return new LessThanEqual(left, right); case DorisParser.GTE: return new GreaterThanEqual(left, right); case DorisParser.NSEQ: return new NullSafeEqual(left, right); default: return null; } } /** * Create a not expression. * format: NOT Expression * for example: * not 1 * not 1=1 */ @Override public Expression visitLogicalNot(LogicalNotContext ctx) { Expression child = expression(ctx.booleanExpression()); return new Not(child); } @Override public Expression visitLogicalBinary(LogicalBinaryContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); return new CompoundPredicate(genNodeType(ctx.operator), left, right); } private static NodeType genNodeType(Token token) { switch (token.getType()) { case DorisParser.AND: return NodeType.AND; case DorisParser.OR: return NodeType.OR; default: return null; } } /** * Create a predicated expression. 
A predicated expression is a normal expression with a * predicate attached to it, for example: * {{{ * a + 1 IS NULL * }}} */ @Override public Expression visitPredicated(PredicatedContext ctx) { Expression e = expression(ctx.valueExpression()); if (ctx.predicate() != null) { return withPredicate(ctx.predicate(), e); } return e; } /** * match predicate type and generate different predicates. * * @param ctx PredicateContext * @param e Expression * @return Expression */ public Expression withPredicate(PredicateContext ctx, Expression e) { switch (ctx.kind.getType()) { case DorisParser.BETWEEN: return withBetween(ctx, e); default: return null; } } /** * Generate between predicate. * * @param ctx PredicateContext * @param e Expression * @return Expression */ public Expression withBetween(PredicateContext ctx, Expression e) { boolean isNotBetween = ctx.NOT() != null ? true : false; BetweenPredicate betweenPredicate = new BetweenPredicate( e, expression(ctx.lower), expression(ctx.upper) ); return isNotBetween ? 
new Not(betweenPredicate) : betweenPredicate; } @Override public Expression visitArithmeticUnary(ArithmeticUnaryContext ctx) { Expression e = expression(ctx); switch (ctx.operator.getType()) { case DorisParser.PLUS: return e; case DorisParser.MINUS: default: return null; } } @Override public Expression visitArithmeticBinary(ArithmeticBinaryContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); return new Arithmetic(genArithmeticOperator(ctx.operator), left, right); } private static Arithmetic.Operator genArithmeticOperator(Token token) { switch (token.getType()) { case DorisParser.ASTERISK: return Operator.MULTIPLY; case DorisParser.SLASH: return Operator.DIVIDE; case DorisParser.PERCENT: return Operator.MOD; case DorisParser.PLUS: return Operator.ADD; case DorisParser.MINUS: return Operator.SUBTRACT; default: return null; } } @Override public Expression visitAggFunctions(AggFunctionsContext ctx) { String functionName = ""; if (ctx.aggFunction().SUM() != null) { functionName = "sum"; } else if (ctx.aggFunction().AVG() != null) { functionName = "avg"; } return new FunctionCall(functionName, new FunctionParams(ctx.aggFunction().DISTINCT() != null, expression(ctx.aggFunction().expression()))); } @Override public Expression visitDereference(DereferenceContext ctx) { Expression e = expression(ctx.base); if (e instanceof UnboundSlot) { UnboundSlot unboundAttribute = (UnboundSlot) e; List<String> nameParts = Lists.newArrayList(unboundAttribute.getNameParts()); nameParts.add(ctx.fieldName.getText()); return new UnboundSlot(nameParts); } else { return null; } } @Override public UnboundSlot visitColumnReference(ColumnReferenceContext ctx) { return UnboundSlot.quoted(ctx.getText()); } /** * Create a NULL literal expression. 
*/ @Override public Expression visitNullLiteral(NullLiteralContext ctx) { return new Literal(null); } @Override public Literal visitBooleanLiteral(BooleanLiteralContext ctx) { Boolean b = Boolean.valueOf(ctx.getText()); return new Literal(b); } @Override public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { Integer l = Integer.valueOf(ctx.getText()); return new Literal(l); } @Override public Literal visitStringLiteral(StringLiteralContext ctx) { String s = ctx.STRING().stream().map(ParseTree::getText).reduce((s1, s2) -> s1 + s2).orElse(""); return new Literal(s); } }
class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> { /** * Create a logical plan using a where clause. */ private final BiFunction<WhereClauseContext, LogicalPlan, LogicalPlan> withWhereClause = (WhereClauseContext ctx, LogicalPlan plan) -> new LogicalUnaryPlan(new LogicalFilter(expression((ctx.booleanExpression()))), plan); protected <T> T typedVisit(ParseTree ctx) { return (T) ctx.accept(this); } /** * Override the default behavior for all visit methods. This will only return a non-null result * when the context has only one child. This is done because there is no generic method to * combine the results of the context children. In all other cases null is returned. */ @Override public Object visitChildren(RuleNode node) { if (node.getChildCount() == 1) { return node.getChild(0).accept(this); } else { return null; } } @Override public LogicalPlan visitSingleStatement(SingleStatementContext ctx) { Supplier<LogicalPlan> f = () -> (LogicalPlan) visit(ctx.statement()); return ParserUtils.withOrigin(ctx, f); } /* ******************************************************************************************** * Plan parsing * ******************************************************************************************** */ private LogicalPlan plan(ParserRuleContext tree) { return (LogicalPlan) tree.accept(this); } @Override public LogicalPlan visitQuery(QueryContext ctx) { Supplier<LogicalPlan> f = () -> { LogicalPlan query = plan(ctx.queryTerm()); LogicalPlan queryOrganization = withQueryOrganization(ctx.queryOrganization(), query); return queryOrganization; }; return ParserUtils.withOrigin(ctx, f); } private LogicalPlan withQueryOrganization(QueryOrganizationContext ctx, LogicalPlan children) { List<SortItems> sortItems = visitQueryOrganization(ctx); return sortItems == null ? 
children : new LogicalUnaryPlan(new LogicalSort(sortItems), children); } @Override public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationContext ctx) { Supplier<LogicalPlan> f = () -> { LogicalPlan from = visitFromClause(ctx.fromClause()); return withSelectQuerySpecification( ctx, ctx.selectClause(), ctx.whereClause(), from, ctx.aggClause()); }; return ParserUtils.withOrigin(ctx, f); } @Override public Expression visitExpression(ExpressionContext ctx) { Supplier<Expression> f = () -> (Expression) visit(ctx.booleanExpression()); return ParserUtils.withOrigin(ctx, f); } @Override public List<Expression> visitNamedExpressionSeq(NamedExpressionSeqContext ctx) { List<Expression> expressions = Lists.newArrayList(); if (ctx != null) { for (NamedExpressionContext namedExpressionContext : ctx.namedExpression()) { Expression expression = typedVisit(namedExpressionContext); expressions.add(expression); } } return expressions; } /** * Add a regular (SELECT) query specification to a logical plan. The query specification * is the core of the logical plan, this is where sourcing (FROM clause), projection (SELECT), * aggregation (GROUP BY ... HAVING ...) and filtering (WHERE) takes place. * * <p>Note that query hints are ignored (both by the parser and the builder). 
*/ private LogicalPlan withSelectQuerySpecification( ParserRuleContext ctx, SelectClauseContext selectClause, WhereClauseContext whereClause, LogicalPlan relation, AggClauseContext aggClause) { Supplier<LogicalPlan> f = () -> { LogicalPlan plan = visitCommonSelectQueryClausePlan( relation, visitNamedExpressionSeq(selectClause.namedExpressionSeq()), whereClause, aggClause); return plan; }; return ParserUtils.withOrigin(ctx, f); } private LogicalPlan visitCommonSelectQueryClausePlan( LogicalPlan relation, List<Expression> expressions, WhereClauseContext whereClause, AggClauseContext aggClause) { LogicalPlan withFilter = relation.optionalMap(whereClause, withWhereClause); List<NamedExpression> namedExpressions = expressions.stream().map(expression -> { if (expression instanceof NamedExpression) { return (NamedExpression) expression; } else { return new UnboundAlias(expression); } }).collect(Collectors.toList()); LogicalPlan withProject; if (CollectionUtils.isNotEmpty(namedExpressions)) { withProject = new LogicalUnaryPlan(new LogicalProject(namedExpressions), withFilter); } else { withProject = withFilter; } LogicalPlan withAgg; if (aggClause != null) { withAgg = withAggClause(namedExpressions, aggClause.groupByItem(), withFilter); } else { withAgg = withProject; } return withAgg; } @Override public LogicalPlan visitFromClause(FromClauseContext ctx) { LogicalPlan left = null; for (RelationContext relation : ctx.relation()) { LogicalPlan right = plan(relation.relationPrimary()); if (left == null) { left = right; } else { left = new LogicalBinaryPlan( new LogicalJoin(JoinType.INNER_JOIN, Optional.empty()), left, right); } left = withJoinRelations(left, relation); } return left; } /** * Join one more [[LogicalPlan]]s to the current logical plan. 
*/ private LogicalPlan withJoinRelations(LogicalPlan base, RelationContext ctx) { LogicalPlan last = base; for (JoinRelationContext join : ctx.joinRelation()) { JoinType joinType; if (join.joinType().LEFT() != null) { joinType = JoinType.LEFT_OUTER_JOIN; } else if (join.joinType().RIGHT() != null) { joinType = JoinType.RIGHT_OUTER_JOIN; } else if (join.joinType().FULL() != null) { joinType = JoinType.FULL_OUTER_JOIN; } else if (join.joinType().SEMI() != null) { joinType = JoinType.LEFT_SEMI_JOIN; } else if (join.joinType().ANTI() != null) { joinType = JoinType.LEFT_ANTI_JOIN; } else if (join.joinType().CROSS() != null) { joinType = JoinType.CROSS_JOIN; } else { joinType = JoinType.INNER_JOIN; } JoinCriteriaContext joinCriteria = join.joinCriteria(); Expression condition; if (joinCriteria == null) { condition = null; } else { condition = expression(joinCriteria.booleanExpression()); } last = new LogicalBinaryPlan( new LogicalJoin(joinType, Optional.ofNullable(condition)), last, plan(join.relationPrimary()) ); } return last; } private LogicalPlan withAggClause(List<NamedExpression> aggExpressions, GroupByItemContext ctx, LogicalPlan aggClause) { List<Expression> tmpExpressions = new ArrayList<>(); for (ExpressionContext expressionCtx : ctx.expression()) { tmpExpressions.add(typedVisit(expressionCtx)); } return new LogicalUnaryPlan(new LogicalAggregation(tmpExpressions, aggExpressions), aggClause); } /** * Generate sortItems. * * @param ctx SortItemContext * @return SortItems */ /** * Create SortItems list. * * @param ctx QueryOrganizationContext * @return List of SortItems */ public List<SortItems> visitQueryOrganization(QueryOrganizationContext ctx) { List<SortItems> sortItems = new ArrayList<>(); if (ctx.sortClause().ORDER() != null) { for (SortItemContext sortItemContext : ctx.sortClause().sortItem()) { sortItems.add(genSortItems(sortItemContext)); } return new ArrayList<>(sortItems); } else { return null; } } /** * Create an aliased table reference. 
This is typically used in FROM clauses. */ @Override public LogicalPlan visitTableName(TableNameContext ctx) { List<String> tableId = visitMultipartIdentifier(ctx.multipartIdentifier()); UnboundRelation relation = new UnboundRelation(tableId); return new LogicalLeafPlan(relation); } /** * Create a Sequence of Strings for a parenthesis enclosed alias list. */ @Override public List<String> visitIdentifierList(IdentifierListContext ctx) { return visitIdentifierSeq(ctx.identifierSeq()); } /** * Create a Sequence of Strings for an identifier list. */ @Override public List<String> visitIdentifierSeq(IdentifierSeqContext ctx) { return ctx.ident.stream().map(RuleContext::getText).collect(Collectors.toList()); } /* ******************************************************************************************** * Table Identifier parsing * ******************************************************************************************** */ @Override public List<String> visitMultipartIdentifier(MultipartIdentifierContext ctx) { return ctx.parts.stream().map(RuleContext::getText).collect(Collectors.toList()); } /* ******************************************************************************************** * Expression parsing * ******************************************************************************************** */ /** * Create an expression from the given context. This method just passes the context on to the * visitor and only takes care of typing (We assume that the visitor returns an Expression here). */ private Expression expression(ParserRuleContext ctx) { return typedVisit(ctx); } /** * Create a star (i.e. all) expression; this selects all elements (in the specified object). * Both un-targeted (global) and targeted aliases are supported. 
*/ @Override public Expression visitStar(StarContext ctx) { Supplier<Expression> f = () -> { final QualifiedNameContext qualifiedNameContext = ctx.qualifiedName(); List<String> target; if (qualifiedNameContext != null) { target = qualifiedNameContext.identifier().stream() .map(RuleContext::getText).collect(Collectors.toList()); } else { target = Lists.newArrayList(); } return new UnboundStar(target); }; return ParserUtils.withOrigin(ctx, f); } /** * Create an aliased expression if an alias is specified. Both single and multi-aliases are * supported. */ @Override public Expression visitNamedExpression(NamedExpressionContext ctx) { final Expression expression = expression(ctx.expression()); if (ctx.name != null) { return new Alias(expression, ctx.name.getText()); } else { return expression; } } /** * Create a comparison expression. This compares two expressions. The following comparison * operators are supported: * - Equal: '=' or '==' * - Null-safe Equal: '<=>' * - Not Equal: '<>' or '!=' * - Less than: '<' * - Less then or Equal: '<=' * - Greater than: '>' * - Greater then or Equal: '>=' */ @Override public Expression visitComparison(ComparisonContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); TerminalNode operator = (TerminalNode) ctx.comparisonOperator().getChild(0); switch (operator.getSymbol().getType()) { case DorisParser.EQ: return new EqualTo(left, right); case DorisParser.NEQ: return new Not(new EqualTo(left, right)); case DorisParser.LT: return new LessThan(left, right); case DorisParser.GT: return new GreaterThan(left, right); case DorisParser.LTE: return new LessThanEqual(left, right); case DorisParser.GTE: return new GreaterThanEqual(left, right); case DorisParser.NSEQ: return new NullSafeEqual(left, right); default: return null; } } /** * Create a not expression. 
* format: NOT Expression * for example: * not 1 * not 1=1 */ @Override public Expression visitLogicalNot(LogicalNotContext ctx) { Expression child = expression(ctx.booleanExpression()); return new Not(child); } @Override public Expression visitLogicalBinary(LogicalBinaryContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); switch (ctx.operator.getType()) { case DorisParser.AND: return new And(left, right); case DorisParser.OR: return new Or(left, right); default: return null; } } /** * Create a predicated expression. A predicated expression is a normal expression with a * predicate attached to it, for example: * {{{ * a + 1 IS NULL * }}} */ @Override public Expression visitPredicated(PredicatedContext ctx) { Expression e = expression(ctx.valueExpression()); if (ctx.predicate() != null) { return withPredicate(ctx.predicate(), e); } return e; } /** * match predicate type and generate different predicates. * * @param ctx PredicateContext * @param e Expression * @return Expression */ public Expression withPredicate(PredicateContext ctx, Expression e) { switch (ctx.kind.getType()) { case DorisParser.BETWEEN: return withBetween(ctx, e); default: return null; } } /** * Generate between predicate. * * @param ctx PredicateContext * @param e Expression * @return Expression */ public Expression withBetween(PredicateContext ctx, Expression e) { boolean isNotBetween = ctx.NOT() != null ? true : false; BetweenPredicate betweenPredicate = new BetweenPredicate( e, expression(ctx.lower), expression(ctx.upper) ); return isNotBetween ? 
new Not(betweenPredicate) : betweenPredicate; } @Override public Expression visitArithmeticUnary(ArithmeticUnaryContext ctx) { Expression e = expression(ctx); switch (ctx.operator.getType()) { case DorisParser.PLUS: return e; case DorisParser.MINUS: default: return null; } } @Override public Expression visitArithmeticBinary(ArithmeticBinaryContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); return genArithmetic(ctx.operator, left, right); } private Arithmetic genArithmetic(Token token, Expression left, Expression right) { switch (token.getType()) { case DorisParser.ASTERISK: return new Multiply(left, right); case DorisParser.SLASH: return new Divide(left, right); case DorisParser.PERCENT: return new Mod(left, right); case DorisParser.PLUS: return new Add(left, right); case DorisParser.MINUS: return new Subtract(left, right); default: return null; } } @Override public Expression visitAggFunctions(AggFunctionsContext ctx) { String functionName = ""; if (ctx.aggFunction().SUM() != null) { functionName = "sum"; } else if (ctx.aggFunction().AVG() != null) { functionName = "avg"; } return new FunctionCall(functionName, new FunctionParams(ctx.aggFunction().DISTINCT() != null, expression(ctx.aggFunction().expression()))); } @Override public Expression visitDereference(DereferenceContext ctx) { Expression e = expression(ctx.base); if (e instanceof UnboundSlot) { UnboundSlot unboundAttribute = (UnboundSlot) e; List<String> nameParts = Lists.newArrayList(unboundAttribute.getNameParts()); nameParts.add(ctx.fieldName.getText()); return new UnboundSlot(nameParts); } else { return null; } } @Override public UnboundSlot visitColumnReference(ColumnReferenceContext ctx) { return UnboundSlot.quoted(ctx.getText()); } /** * Create a NULL literal expression. 
*/ @Override public Expression visitNullLiteral(NullLiteralContext ctx) { return new Literal(null); } @Override public Literal visitBooleanLiteral(BooleanLiteralContext ctx) { Boolean b = Boolean.valueOf(ctx.getText()); return new Literal(b); } @Override public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { Integer l = Integer.valueOf(ctx.getText()); return new Literal(l); } @Override public Literal visitStringLiteral(StringLiteralContext ctx) { String s = ctx.STRING().stream().map(ParseTree::getText).reduce((s1, s2) -> s1 + s2).orElse(""); return new Literal(s); } }
If the thread is still alive, do we also need to `Thread.currentThread().interrupt()`?
public void close() throws IOException { wasClosed = true; while (thread.isAlive()) { thread.interrupt(); try { thread.join(); } catch (InterruptedException e) { if (!thread.isAlive()) { Thread.currentThread().interrupt(); } LOG.debug("interrupted while waiting for the writer thread to die", e); } } if (thrown != null) { throw new IOException(thrown); } }
thread.interrupt();
public void close() throws IOException { wasClosed = true; while (thread.isAlive()) { thread.interrupt(); try { thread.join(); } catch (InterruptedException e) { if (!thread.isAlive()) { Thread.currentThread().interrupt(); } LOG.debug("interrupted while waiting for the writer thread to die", e); } } if (thrown != null) { throw new IOException(thrown); } }
class ChannelStateWriteRequestExecutorImpl implements ChannelStateWriteRequestExecutor { private static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriteRequestExecutorImpl.class); private static final int DEFAULT_HANDOVER_CAPACITY = 10_000; private final ChannelStateWriteRequestDispatcher dispatcher; private final BlockingDeque<ChannelStateWriteRequest> deque; private final Thread thread; private volatile Exception thrown = null; private volatile boolean wasClosed = false; ChannelStateWriteRequestExecutorImpl(ChannelStateWriteRequestDispatcher dispatcher) { this(dispatcher, new LinkedBlockingDeque<>(DEFAULT_HANDOVER_CAPACITY)); } ChannelStateWriteRequestExecutorImpl(ChannelStateWriteRequestDispatcher dispatcher, BlockingDeque<ChannelStateWriteRequest> deque) { this.dispatcher = dispatcher; this.deque = deque; this.thread = new Thread(this::run); this.thread.setDaemon(true); } @VisibleForTesting void run() { try { loop(); } catch (Exception ex) { thrown = ex; } finally { cleanupRequests(); dispatcher.close(thrown == null ? new CancellationException() : thrown); } LOG.debug("loop terminated"); } private void loop() throws Exception { while (isActive()) { try { dispatcher.dispatch(deque.take()); } catch (InterruptedException e) { if (isActive()) { LOG.debug("interrupted while waiting for a request (continue waiting)", e); } else { Thread.currentThread().interrupt(); } } } } private void cleanupRequests() { Throwable cause = thrown == null ? 
new CancellationException() : thrown; List<ChannelStateWriteRequest> drained = new ArrayList<>(); deque.drainTo(drained); LOG.info("discarding {} drained requests", drained.size()); for (ChannelStateWriteRequest request : drained) { request.cancel(cause); } } @Override public void start() throws IllegalStateException { this.thread.start(); } @Override public void submit(ChannelStateWriteRequest request) throws Exception { submitInternal(request, () -> deque.add(request)); } @Override public void submitPriority(ChannelStateWriteRequest request) throws Exception { submitInternal(request, () -> deque.addFirst(request)); } private void submitInternal(ChannelStateWriteRequest request, RunnableWithException action) throws Exception { try { action.run(); } catch (Exception ex) { request.cancel(ex); throw ex; } ensureRunning(); } private void ensureRunning() throws Exception { if (!isActive()) { cleanupRequests(); throw ExceptionUtils.firstOrSuppressed(new IllegalStateException("not running"), thrown); } } private boolean isActive() { return !wasClosed && thread.isAlive(); } @Override @VisibleForTesting Thread getThread() { return thread; } }
class ChannelStateWriteRequestExecutorImpl implements ChannelStateWriteRequestExecutor { private static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriteRequestExecutorImpl.class); private static final int DEFAULT_HANDOVER_CAPACITY = 10_000; private final ChannelStateWriteRequestDispatcher dispatcher; private final BlockingDeque<ChannelStateWriteRequest> deque; private final Thread thread; private volatile Exception thrown = null; private volatile boolean wasClosed = false; ChannelStateWriteRequestExecutorImpl(ChannelStateWriteRequestDispatcher dispatcher) { this(dispatcher, new LinkedBlockingDeque<>(DEFAULT_HANDOVER_CAPACITY)); } ChannelStateWriteRequestExecutorImpl(ChannelStateWriteRequestDispatcher dispatcher, BlockingDeque<ChannelStateWriteRequest> deque) { this.dispatcher = dispatcher; this.deque = deque; this.thread = new Thread(this::run); this.thread.setDaemon(true); } @VisibleForTesting void run() { try { loop(); } catch (Exception ex) { thrown = ex; } finally { cleanupRequests(); dispatcher.fail(thrown == null ? new CancellationException() : thrown); } LOG.debug("loop terminated"); } private void loop() throws Exception { while (!wasClosed) { try { dispatcher.dispatch(deque.take()); } catch (InterruptedException e) { if (!wasClosed) { LOG.debug("interrupted while waiting for a request (continue waiting)", e); } else { Thread.currentThread().interrupt(); } } } } private void cleanupRequests() { Throwable cause = thrown == null ? 
new CancellationException() : thrown; List<ChannelStateWriteRequest> drained = new ArrayList<>(); deque.drainTo(drained); LOG.info("discarding {} drained requests", drained.size()); for (ChannelStateWriteRequest request : drained) { request.cancel(cause); } } @Override public void start() throws IllegalStateException { this.thread.start(); } @Override public void submit(ChannelStateWriteRequest request) throws Exception { submitInternal(request, () -> deque.add(request)); } @Override public void submitPriority(ChannelStateWriteRequest request) throws Exception { submitInternal(request, () -> deque.addFirst(request)); } private void submitInternal(ChannelStateWriteRequest request, RunnableWithException action) throws Exception { try { action.run(); } catch (Exception ex) { request.cancel(ex); throw ex; } ensureRunning(); } private void ensureRunning() throws Exception { if (wasClosed || !thread.isAlive()) { cleanupRequests(); throw ExceptionUtils.firstOrSuppressed(new IllegalStateException("not running"), thrown); } } @Override @VisibleForTesting Thread getThread() { return thread; } }
not necessary to log warning, I will add it to query trace log.
private void prepareRelatedMVs(Set<Table> queryTables, Set<MaterializedView> relatedMvs, boolean isSyncMV) { String queryExcludingMVNames = connectContext.getSessionVariable().getQueryExcludingMVNames(); String queryIncludingMVNames = connectContext.getSessionVariable().getQueryIncludingMVNames(); if (!Strings.isNullOrEmpty(queryExcludingMVNames) || !Strings.isNullOrEmpty(queryIncludingMVNames)) { Set<String> queryExcludingMVNamesSet = Sets.newHashSet(queryExcludingMVNames.split(",")); Set<String> queryIncludingMVNamesSet = Sets.newHashSet(queryIncludingMVNames.split(",")); relatedMvs = relatedMvs.stream() .filter(mv -> queryIncludingMVNamesSet.contains(mv.getName())) .filter(mv -> !queryExcludingMVNamesSet.contains(mv.getName())) .collect(Collectors.toSet()); } if (relatedMvs.isEmpty()) { logMVPrepare(connectContext, "[SYNC={}] There are no related mvs for the query plan", isSyncMV); return; } Set<ColumnRefOperator> originQueryColumns = Sets.newHashSet(queryColumnRefFactory.getColumnRefs()); for (MaterializedView mv : relatedMvs) { if (!mv.isValidPlan()) { continue; } try { preprocessMv(mv, queryTables, originQueryColumns, isSyncMV); } catch (Exception e) { List<String> tableNames = queryTables.stream().map(Table::getName).collect(Collectors.toList()); LOG.warn("[SYNC={}] Preprocess mv {} failed for query tables:{}", isSyncMV, mv.getName(), tableNames, e); } } if (relatedMvs.isEmpty()) { logMVPrepare(connectContext, "[SYNC={}] There are no related mvs after process", isSyncMV); return; } List<String> relatedMvNames = relatedMvs.stream().map(mv -> mv.getName()).collect(Collectors.toList()); List<String> candidateMvNames = context.getCandidateMvs().stream() .map(materializationContext -> materializationContext.getMv().getName()).collect(Collectors.toList()); logMVPrepare(connectContext, "[SYNC={}] RelatedMVs: {}, CandidateMVs: {}", isSyncMV, relatedMvNames, candidateMvNames); }
private void prepareRelatedMVs(Set<Table> queryTables, Set<MaterializedView> relatedMvs, boolean isSyncMV) { String queryExcludingMVNames = connectContext.getSessionVariable().getQueryExcludingMVNames(); String queryIncludingMVNames = connectContext.getSessionVariable().getQueryIncludingMVNames(); if (!Strings.isNullOrEmpty(queryExcludingMVNames) || !Strings.isNullOrEmpty(queryIncludingMVNames)) { Set<String> queryExcludingMVNamesSet = Sets.newHashSet(queryExcludingMVNames.split(",")); Set<String> queryIncludingMVNamesSet = Sets.newHashSet(queryIncludingMVNames.split(",")); relatedMvs = relatedMvs.stream() .filter(mv -> queryIncludingMVNamesSet.contains(mv.getName())) .filter(mv -> !queryExcludingMVNamesSet.contains(mv.getName())) .collect(Collectors.toSet()); } if (relatedMvs.isEmpty()) { logMVPrepare(connectContext, "[SYNC={}] There are no related mvs for the query plan", isSyncMV); return; } Set<ColumnRefOperator> originQueryColumns = Sets.newHashSet(queryColumnRefFactory.getColumnRefs()); for (MaterializedView mv : relatedMvs) { if (!mv.isValidPlan()) { continue; } try { preprocessMv(mv, queryTables, originQueryColumns, isSyncMV); } catch (Exception e) { List<String> tableNames = queryTables.stream().map(Table::getName).collect(Collectors.toList()); LOG.warn("[SYNC={}] Preprocess mv {} failed for query tables:{}", isSyncMV, mv.getName(), tableNames, e); } } if (relatedMvs.isEmpty()) { logMVPrepare(connectContext, "[SYNC={}] There are no related mvs after process", isSyncMV); return; } List<String> relatedMvNames = relatedMvs.stream().map(mv -> mv.getName()).collect(Collectors.toList()); List<String> candidateMvNames = context.getCandidateMvs().stream() .map(materializationContext -> materializationContext.getMv().getName()).collect(Collectors.toList()); logMVPrepare(connectContext, "[SYNC={}] RelatedMVs: {}, CandidateMVs: {}", isSyncMV, relatedMvNames, candidateMvNames); }
class MvRewritePreprocessor { private static final Logger LOG = LogManager.getLogger(MvRewritePreprocessor.class); private final ConnectContext connectContext; private final ColumnRefFactory queryColumnRefFactory; private final OptimizerContext context; private final OptExpression logicOperatorTree; public MvRewritePreprocessor(ConnectContext connectContext, ColumnRefFactory queryColumnRefFactory, OptimizerContext context, OptExpression logicOperatorTree) { this.connectContext = connectContext; this.queryColumnRefFactory = queryColumnRefFactory; this.context = context; this.logicOperatorTree = logicOperatorTree; } public void prepareMvCandidatesForPlan() { Set<Table> queryTables = MvUtils.getAllTables(logicOperatorTree).stream().collect(Collectors.toSet()); Set<MaterializedView> relatedMvs = MvUtils.getRelatedMvs(connectContext.getSessionVariable().getNestedMvRewriteMaxLevel(), queryTables); prepareRelatedMVs(queryTables, relatedMvs, false); } public void prepareSyncMvCandidatesForPlan() { Set<Table> queryTables = MvUtils.getAllTables(logicOperatorTree).stream().collect(Collectors.toSet()); Set<MaterializedView> relatedMvs = Sets.newHashSet(); for (Table table : queryTables) { if (!(table instanceof OlapTable)) { continue; } OlapTable olapTable = (OlapTable) table; for (MaterializedIndexMeta indexMeta : olapTable.getVisibleIndexMetas()) { long indexId = indexMeta.getIndexId(); if (indexMeta.getIndexId() == olapTable.getBaseIndexId()) { continue; } if (Strings.isNullOrEmpty(indexMeta.getViewDefineSql())) { continue; } if (!MVUtils.containComplexExpresses(indexMeta)) { continue; } try { long dbId = indexMeta.getDbId(); String viewDefineSql = indexMeta.getViewDefineSql(); String mvName = olapTable.getIndexNameById(indexId); Database db = GlobalStateMgr.getCurrentState().getDb(dbId); DistributionInfo baseTableDistributionInfo = olapTable.getDefaultDistributionInfo(); DistributionInfo mvDistributionInfo = baseTableDistributionInfo.copy(); Set<String> mvColumnNames = 
indexMeta.getSchema().stream().map(Column::getName).collect(Collectors.toSet()); if (baseTableDistributionInfo.getType() == DistributionInfoType.HASH) { HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) baseTableDistributionInfo; Set<String> distributedColumns = hashDistributionInfo.getDistributionColumns().stream().map(Column::getName) .collect(Collectors.toSet()); List<Column> newDistributionColumns = Lists.newArrayList(); for (Column mvColumn : indexMeta.getSchema()) { if (distributedColumns.contains(mvColumn.getName())) { newDistributionColumns.add(mvColumn); } } if (newDistributionColumns.size() != distributedColumns.size()) { mvDistributionInfo = new RandomDistributionInfo(); } else { ((HashDistributionInfo) mvDistributionInfo).setDistributionColumns(newDistributionColumns); } } PartitionInfo basePartitionInfo = olapTable.getPartitionInfo(); PartitionInfo mvPartitionInfo = basePartitionInfo; if (basePartitionInfo.isPartitioned()) { if (basePartitionInfo.getPartitionColumns().stream() .anyMatch(x -> !mvColumnNames.contains(x.getName())) || !(basePartitionInfo instanceof ExpressionRangePartitionInfo)) { mvPartitionInfo = new SinglePartitionInfo(); } } MaterializedView.MvRefreshScheme mvRefreshScheme = new MaterializedView.MvRefreshScheme(MaterializedView.RefreshType.SYNC); MaterializedView mv = new MaterializedView(db, mvName, indexMeta, olapTable, mvPartitionInfo, mvDistributionInfo, mvRefreshScheme); mv.setViewDefineSql(viewDefineSql); mv.setBaseIndexId(indexId); relatedMvs.add(mv); } catch (Exception e) { LOG.warn("error happens when parsing create sync materialized view stmt [{}] use new parser", indexId, e); } } } prepareRelatedMVs(queryTables, relatedMvs, true); } private void preprocessMv(MaterializedView mv, Set<Table> queryTables, Set<ColumnRefOperator> originQueryColumns, boolean isSyncMV) { if (!mv.isActive()) { logMVPrepare(connectContext, mv, "[SYNC={}] MV is not active: {}", isSyncMV, mv.getName()); return; } MvPlanContext 
mvPlanContext = CachingMvPlanContextBuilder.getInstance().getPlanContext(mv, connectContext.getSessionVariable().isEnableMaterializedViewPlanCache()); if (mvPlanContext == null) { logMVPrepare(connectContext, mv, "[SYNC={}] MV plan is not valid: {}, cannot generate plan for rewrite", isSyncMV, mv.getName()); return; } if (!mvPlanContext.isValidMvPlan()) { mv.setPlanMode(MaterializedView.PlanMode.INVALID); if (mvPlanContext.getLogicalPlan() != null) { logMVPrepare(connectContext, mv, "[SYNC={}] MV plan is not valid: {}, plan:\n {}", isSyncMV, mv.getName(), mvPlanContext.getLogicalPlan().explain()); } else { logMVPrepare(connectContext, mv, "[SYNC={}] MV plan is not valid: {}", isSyncMV, mv.getName()); } return; } Set<String> partitionNamesToRefresh = mv.getPartitionNamesToRefreshForMv(true); PartitionInfo partitionInfo = mv.getPartitionInfo(); if (partitionInfo instanceof SinglePartitionInfo) { if (!partitionNamesToRefresh.isEmpty()) { StringBuilder sb = new StringBuilder(); for (BaseTableInfo base : mv.getBaseTableInfos()) { String versionInfo = Joiner.on(",").join(mv.getBaseTableLatestPartitionInfo(base.getTable())); sb.append(String.format("base table %s version: %s; ", base, versionInfo)); } logMVPrepare(connectContext, mv, "[SYNC={}] MV {} is outdated, stale partitions {}, detailed version info: {}", isSyncMV, mv.getName(), partitionNamesToRefresh, sb.toString()); return; } } else if (!mv.getPartitionNames().isEmpty() && partitionNamesToRefresh.containsAll(mv.getPartitionNames())) { StringBuilder sb = new StringBuilder(); for (BaseTableInfo base : mv.getBaseTableInfos()) { String versionInfo = Joiner.on(",").join(mv.getBaseTableLatestPartitionInfo(base.getTable())); sb.append(String.format("base table %s version: %s; ", base, versionInfo)); } logMVPrepare(connectContext, mv, "[SYNC={}] MV {} is outdated and all its partitions need to be " + "refreshed: {}, detailed info: {}", isSyncMV, mv.getName(), partitionNamesToRefresh, sb.toString()); return; } 
Preconditions.checkState(mvPlanContext != null); OptExpression mvPlan = mvPlanContext.getLogicalPlan(); Preconditions.checkState(mvPlan != null); ScalarOperator mvPartialPartitionPredicates = null; if (mv.getPartitionInfo() instanceof ExpressionRangePartitionInfo && !partitionNamesToRefresh.isEmpty()) { mvPartialPartitionPredicates = getMvPartialPartitionPredicates(mv, mvPlan, partitionNamesToRefresh); if (mvPartialPartitionPredicates == null) { logMVPrepare(connectContext, mv, "[SYNC={}] Partitioned MV {} is outdated which contains some partitions " + "to be refreshed: {}", isSyncMV, mv.getName(), partitionNamesToRefresh); return; } } if (connectContext.getDumpInfo() != null) { String dbName = connectContext.getGlobalStateMgr().getDb(mv.getDbId()).getFullName(); connectContext.getDumpInfo().addTable(dbName, mv); } List<Table> baseTables = MvUtils.getAllTables(mvPlan); List<Table> intersectingTables = baseTables.stream().filter(queryTables::contains).collect(Collectors.toList()); MaterializationContext materializationContext = new MaterializationContext(context, mv, mvPlan, queryColumnRefFactory, mvPlanContext.getRefFactory(), partitionNamesToRefresh, baseTables, originQueryColumns, intersectingTables, mvPartialPartitionPredicates); List<ColumnRefOperator> mvOutputColumns = mvPlanContext.getOutputColumns(); LogicalOlapScanOperator scanMvOp = createScanMvOperator(materializationContext, partitionNamesToRefresh); materializationContext.setScanMvOperator(scanMvOp); List<ColumnRefOperator> scanMvOutputColumns = Lists.newArrayList(); for (Column column : mv.getBaseSchema()) { scanMvOutputColumns.add(scanMvOp.getColumnReference(column)); } Preconditions.checkState(mvOutputColumns.size() == scanMvOutputColumns.size()); Map<ColumnRefOperator, ColumnRefOperator> outputMapping = Maps.newHashMap(); for (int i = 0; i < mvOutputColumns.size(); i++) { outputMapping.put(mvOutputColumns.get(i), scanMvOutputColumns.get(i)); } materializationContext.setOutputMapping(outputMapping); 
context.addCandidateMvs(materializationContext); logMVPrepare(connectContext, mv, "[SYNC={}] Prepare MV {} success", isSyncMV, mv.getName()); } /** * Make a LogicalOlapScanOperator by using MV's schema which includes: * - partition infos. * - distribution infos. * - original MV's predicates which can be deduced from MV opt expression and be used * for partition/distribution pruning. */ private LogicalOlapScanOperator createScanMvOperator(MaterializationContext mvContext, Set<String> excludedPartitions) { final MaterializedView mv = mvContext.getMv(); final ImmutableMap.Builder<ColumnRefOperator, Column> colRefToColumnMetaMapBuilder = ImmutableMap.builder(); final ImmutableMap.Builder<Column, ColumnRefOperator> columnMetaToColRefMapBuilder = ImmutableMap.builder(); final ColumnRefFactory columnRefFactory = mvContext.getQueryRefFactory(); int relationId = columnRefFactory.getNextRelationId(); Set<String> columnNames = Sets.newHashSet(); for (Column column : mv.getBaseSchema()) { ColumnRefOperator columnRef = columnRefFactory.create(column.getName(), column.getType(), column.isAllowNull()); columnRefFactory.updateColumnToRelationIds(columnRef.getId(), relationId); columnRefFactory.updateColumnRefToColumns(columnRef, column, mv); colRefToColumnMetaMapBuilder.put(columnRef, column); columnMetaToColRefMapBuilder.put(column, columnRef); columnNames.add(column.getName()); } for (Column column : mv.getFullSchema()) { if (columnNames.contains(column.getName())) { continue; } ColumnRefOperator columnRef = columnRefFactory.create(column.getName(), column.getType(), column.isAllowNull()); columnRefFactory.updateColumnToRelationIds(columnRef.getId(), relationId); columnRefFactory.updateColumnRefToColumns(columnRef, column, mv); colRefToColumnMetaMapBuilder.put(columnRef, column); columnMetaToColRefMapBuilder.put(column, columnRef); } final Map<Column, ColumnRefOperator> columnMetaToColRefMap = columnMetaToColRefMapBuilder.build(); List<Long> selectPartitionIds = 
Lists.newArrayList(); List<Long> selectTabletIds = Lists.newArrayList(); List<String> selectedPartitionNames = Lists.newArrayList(); for (Partition p : mv.getPartitions()) { if (!excludedPartitions.contains(p.getName()) && p.hasData()) { selectPartitionIds.add(p.getId()); selectedPartitionNames.add(p.getName()); for (PhysicalPartition physicalPartition : p.getSubPartitions()) { MaterializedIndex materializedIndex = physicalPartition.getIndex(mv.getBaseIndexId()); selectTabletIds.addAll(materializedIndex.getTabletIdsInOrder()); } } } final PartitionNames partitionNames = new PartitionNames(false, selectedPartitionNames); return LogicalOlapScanOperator.builder() .setTable(mv) .setColRefToColumnMetaMap(colRefToColumnMetaMapBuilder.build()) .setColumnMetaToColRefMap(columnMetaToColRefMap) .setDistributionSpec(getTableDistributionSpec(mvContext, columnMetaToColRefMap)) .setSelectedIndexId(mv.getBaseIndexId()) .setSelectedPartitionId(selectPartitionIds) .setPartitionNames(partitionNames) .setSelectedTabletId(selectTabletIds) .setHintsTabletIds(Collections.emptyList()) .setHintsReplicaIds(Collections.emptyList()) .setHasTableHints(false) .setUsePkIndex(false) .build(); } private DistributionSpec getTableDistributionSpec( MaterializationContext mvContext, Map<Column, ColumnRefOperator> columnMetaToColRefMap) { final MaterializedView mv = mvContext.getMv(); DistributionSpec distributionSpec = null; DistributionInfo distributionInfo = mv.getDefaultDistributionInfo(); if (distributionInfo.getType() == DistributionInfoType.HASH) { HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo; List<Column> distributedColumns = hashDistributionInfo.getDistributionColumns(); List<Integer> hashDistributeColumns = new ArrayList<>(); for (Column distributedColumn : distributedColumns) { hashDistributeColumns.add(columnMetaToColRefMap.get(distributedColumn).getId()); } final HashDistributionDesc hashDistributionDesc = new 
HashDistributionDesc(hashDistributeColumns, HashDistributionDesc.SourceType.LOCAL); distributionSpec = DistributionSpec.createHashDistributionSpec(hashDistributionDesc); } else if (distributionInfo.getType() == DistributionInfoType.RANDOM) { distributionSpec = DistributionSpec.createAnyDistributionSpec(); } return distributionSpec; } }
class MvRewritePreprocessor { private static final Logger LOG = LogManager.getLogger(MvRewritePreprocessor.class); private final ConnectContext connectContext; private final ColumnRefFactory queryColumnRefFactory; private final OptimizerContext context; private final OptExpression logicOperatorTree; public MvRewritePreprocessor(ConnectContext connectContext, ColumnRefFactory queryColumnRefFactory, OptimizerContext context, OptExpression logicOperatorTree) { this.connectContext = connectContext; this.queryColumnRefFactory = queryColumnRefFactory; this.context = context; this.logicOperatorTree = logicOperatorTree; } public void prepareMvCandidatesForPlan() { Set<Table> queryTables = MvUtils.getAllTables(logicOperatorTree).stream().collect(Collectors.toSet()); Set<MaterializedView> relatedMvs = MvUtils.getRelatedMvs(connectContext.getSessionVariable().getNestedMvRewriteMaxLevel(), queryTables); prepareRelatedMVs(queryTables, relatedMvs, false); } public void prepareSyncMvCandidatesForPlan() { Set<Table> queryTables = MvUtils.getAllTables(logicOperatorTree).stream().collect(Collectors.toSet()); Set<MaterializedView> relatedMvs = Sets.newHashSet(); for (Table table : queryTables) { if (!(table instanceof OlapTable)) { continue; } OlapTable olapTable = (OlapTable) table; for (MaterializedIndexMeta indexMeta : olapTable.getVisibleIndexMetas()) { long indexId = indexMeta.getIndexId(); if (indexMeta.getIndexId() == olapTable.getBaseIndexId()) { continue; } if (Strings.isNullOrEmpty(indexMeta.getViewDefineSql())) { continue; } if (!MVUtils.containComplexExpresses(indexMeta)) { continue; } try { long dbId = indexMeta.getDbId(); String viewDefineSql = indexMeta.getViewDefineSql(); String mvName = olapTable.getIndexNameById(indexId); Database db = GlobalStateMgr.getCurrentState().getDb(dbId); DistributionInfo baseTableDistributionInfo = olapTable.getDefaultDistributionInfo(); DistributionInfo mvDistributionInfo = baseTableDistributionInfo.copy(); Set<String> mvColumnNames = 
indexMeta.getSchema().stream().map(Column::getName).collect(Collectors.toSet()); if (baseTableDistributionInfo.getType() == DistributionInfoType.HASH) { HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) baseTableDistributionInfo; Set<String> distributedColumns = hashDistributionInfo.getDistributionColumns().stream().map(Column::getName) .collect(Collectors.toSet()); List<Column> newDistributionColumns = Lists.newArrayList(); for (Column mvColumn : indexMeta.getSchema()) { if (distributedColumns.contains(mvColumn.getName())) { newDistributionColumns.add(mvColumn); } } if (newDistributionColumns.size() != distributedColumns.size()) { mvDistributionInfo = new RandomDistributionInfo(); } else { ((HashDistributionInfo) mvDistributionInfo).setDistributionColumns(newDistributionColumns); } } PartitionInfo basePartitionInfo = olapTable.getPartitionInfo(); PartitionInfo mvPartitionInfo = basePartitionInfo; if (basePartitionInfo.isPartitioned()) { if (basePartitionInfo.getPartitionColumns().stream() .anyMatch(x -> !mvColumnNames.contains(x.getName())) || !(basePartitionInfo instanceof ExpressionRangePartitionInfo)) { mvPartitionInfo = new SinglePartitionInfo(); } } MaterializedView.MvRefreshScheme mvRefreshScheme = new MaterializedView.MvRefreshScheme(MaterializedView.RefreshType.SYNC); MaterializedView mv = new MaterializedView(db, mvName, indexMeta, olapTable, mvPartitionInfo, mvDistributionInfo, mvRefreshScheme); mv.setViewDefineSql(viewDefineSql); mv.setBaseIndexId(indexId); relatedMvs.add(mv); } catch (Exception e) { LOG.warn("error happens when parsing create sync materialized view stmt [{}] use new parser", indexId, e); } } } prepareRelatedMVs(queryTables, relatedMvs, true); } private void preprocessMv(MaterializedView mv, Set<Table> queryTables, Set<ColumnRefOperator> originQueryColumns, boolean isSyncMV) { if (!mv.isActive()) { logMVPrepare(connectContext, mv, "[SYNC={}] MV is not active: {}", isSyncMV, mv.getName()); return; } MvPlanContext 
mvPlanContext = CachingMvPlanContextBuilder.getInstance().getPlanContext(mv, connectContext.getSessionVariable().isEnableMaterializedViewPlanCache()); if (mvPlanContext == null) { logMVPrepare(connectContext, mv, "[SYNC={}] MV plan is not valid: {}, cannot generate plan for rewrite", isSyncMV, mv.getName()); return; } if (!mvPlanContext.isValidMvPlan()) { mv.setPlanMode(MaterializedView.PlanMode.INVALID); if (mvPlanContext.getLogicalPlan() != null) { logMVPrepare(connectContext, mv, "[SYNC={}] MV plan is not valid: {}, plan:\n {}", isSyncMV, mv.getName(), mvPlanContext.getLogicalPlan().debugString()); } else { logMVPrepare(connectContext, mv, "[SYNC={}] MV plan is not valid: {}", isSyncMV, mv.getName()); } return; } Set<String> partitionNamesToRefresh = mv.getPartitionNamesToRefreshForMv(true); PartitionInfo partitionInfo = mv.getPartitionInfo(); if (partitionInfo instanceof SinglePartitionInfo) { if (!partitionNamesToRefresh.isEmpty()) { StringBuilder sb = new StringBuilder(); for (BaseTableInfo base : mv.getBaseTableInfos()) { String versionInfo = Joiner.on(",").join(mv.getBaseTableLatestPartitionInfo(base.getTable())); sb.append(String.format("base table %s version: %s; ", base, versionInfo)); } logMVPrepare(connectContext, mv, "[SYNC={}] MV {} is outdated, stale partitions {}, detailed version info: {}", isSyncMV, mv.getName(), partitionNamesToRefresh, sb.toString()); return; } } else if (!mv.getPartitionNames().isEmpty() && partitionNamesToRefresh.containsAll(mv.getPartitionNames())) { StringBuilder sb = new StringBuilder(); for (BaseTableInfo base : mv.getBaseTableInfos()) { String versionInfo = Joiner.on(",").join(mv.getBaseTableLatestPartitionInfo(base.getTable())); sb.append(String.format("base table %s version: %s; ", base, versionInfo)); } logMVPrepare(connectContext, mv, "[SYNC={}] MV {} is outdated and all its partitions need to be " + "refreshed: {}, detailed info: {}", isSyncMV, mv.getName(), partitionNamesToRefresh, sb.toString()); return; } 
Preconditions.checkState(mvPlanContext != null); OptExpression mvPlan = mvPlanContext.getLogicalPlan(); Preconditions.checkState(mvPlan != null); ScalarOperator mvPartialPartitionPredicates = null; if (mv.getPartitionInfo() instanceof ExpressionRangePartitionInfo && !partitionNamesToRefresh.isEmpty()) { mvPartialPartitionPredicates = getMvPartialPartitionPredicates(mv, mvPlan, partitionNamesToRefresh); if (mvPartialPartitionPredicates == null) { logMVPrepare(connectContext, mv, "[SYNC={}] Partitioned MV {} is outdated which contains some partitions " + "to be refreshed: {}", isSyncMV, mv.getName(), partitionNamesToRefresh); return; } } if (connectContext.getDumpInfo() != null) { String dbName = connectContext.getGlobalStateMgr().getDb(mv.getDbId()).getFullName(); connectContext.getDumpInfo().addTable(dbName, mv); } List<Table> baseTables = MvUtils.getAllTables(mvPlan); List<Table> intersectingTables = baseTables.stream().filter(queryTables::contains).collect(Collectors.toList()); MaterializationContext materializationContext = new MaterializationContext(context, mv, mvPlan, queryColumnRefFactory, mvPlanContext.getRefFactory(), partitionNamesToRefresh, baseTables, originQueryColumns, intersectingTables, mvPartialPartitionPredicates); List<ColumnRefOperator> mvOutputColumns = mvPlanContext.getOutputColumns(); LogicalOlapScanOperator scanMvOp = createScanMvOperator(materializationContext, partitionNamesToRefresh); materializationContext.setScanMvOperator(scanMvOp); List<ColumnRefOperator> scanMvOutputColumns = Lists.newArrayList(); for (Column column : mv.getBaseSchema()) { scanMvOutputColumns.add(scanMvOp.getColumnReference(column)); } Preconditions.checkState(mvOutputColumns.size() == scanMvOutputColumns.size()); Map<ColumnRefOperator, ColumnRefOperator> outputMapping = Maps.newHashMap(); for (int i = 0; i < mvOutputColumns.size(); i++) { outputMapping.put(mvOutputColumns.get(i), scanMvOutputColumns.get(i)); } materializationContext.setOutputMapping(outputMapping); 
context.addCandidateMvs(materializationContext); logMVPrepare(connectContext, mv, "[SYNC={}] Prepare MV {} success", isSyncMV, mv.getName()); } /** * Make a LogicalOlapScanOperator by using MV's schema which includes: * - partition infos. * - distribution infos. * - original MV's predicates which can be deduced from MV opt expression and be used * for partition/distribution pruning. */ private LogicalOlapScanOperator createScanMvOperator(MaterializationContext mvContext, Set<String> excludedPartitions) { final MaterializedView mv = mvContext.getMv(); final ImmutableMap.Builder<ColumnRefOperator, Column> colRefToColumnMetaMapBuilder = ImmutableMap.builder(); final ImmutableMap.Builder<Column, ColumnRefOperator> columnMetaToColRefMapBuilder = ImmutableMap.builder(); final ColumnRefFactory columnRefFactory = mvContext.getQueryRefFactory(); int relationId = columnRefFactory.getNextRelationId(); Set<String> columnNames = Sets.newHashSet(); for (Column column : mv.getBaseSchema()) { ColumnRefOperator columnRef = columnRefFactory.create(column.getName(), column.getType(), column.isAllowNull()); columnRefFactory.updateColumnToRelationIds(columnRef.getId(), relationId); columnRefFactory.updateColumnRefToColumns(columnRef, column, mv); colRefToColumnMetaMapBuilder.put(columnRef, column); columnMetaToColRefMapBuilder.put(column, columnRef); columnNames.add(column.getName()); } for (Column column : mv.getFullSchema()) { if (columnNames.contains(column.getName())) { continue; } ColumnRefOperator columnRef = columnRefFactory.create(column.getName(), column.getType(), column.isAllowNull()); columnRefFactory.updateColumnToRelationIds(columnRef.getId(), relationId); columnRefFactory.updateColumnRefToColumns(columnRef, column, mv); colRefToColumnMetaMapBuilder.put(columnRef, column); columnMetaToColRefMapBuilder.put(column, columnRef); } final Map<Column, ColumnRefOperator> columnMetaToColRefMap = columnMetaToColRefMapBuilder.build(); List<Long> selectPartitionIds = 
Lists.newArrayList(); List<Long> selectTabletIds = Lists.newArrayList(); List<String> selectedPartitionNames = Lists.newArrayList(); for (Partition p : mv.getPartitions()) { if (!excludedPartitions.contains(p.getName()) && p.hasData()) { selectPartitionIds.add(p.getId()); selectedPartitionNames.add(p.getName()); for (PhysicalPartition physicalPartition : p.getSubPartitions()) { MaterializedIndex materializedIndex = physicalPartition.getIndex(mv.getBaseIndexId()); selectTabletIds.addAll(materializedIndex.getTabletIdsInOrder()); } } } final PartitionNames partitionNames = new PartitionNames(false, selectedPartitionNames); return LogicalOlapScanOperator.builder() .setTable(mv) .setColRefToColumnMetaMap(colRefToColumnMetaMapBuilder.build()) .setColumnMetaToColRefMap(columnMetaToColRefMap) .setDistributionSpec(getTableDistributionSpec(mvContext, columnMetaToColRefMap)) .setSelectedIndexId(mv.getBaseIndexId()) .setSelectedPartitionId(selectPartitionIds) .setPartitionNames(partitionNames) .setSelectedTabletId(selectTabletIds) .setHintsTabletIds(Collections.emptyList()) .setHintsReplicaIds(Collections.emptyList()) .setHasTableHints(false) .setUsePkIndex(false) .build(); } private DistributionSpec getTableDistributionSpec( MaterializationContext mvContext, Map<Column, ColumnRefOperator> columnMetaToColRefMap) { final MaterializedView mv = mvContext.getMv(); DistributionSpec distributionSpec = null; DistributionInfo distributionInfo = mv.getDefaultDistributionInfo(); if (distributionInfo.getType() == DistributionInfoType.HASH) { HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo; List<Column> distributedColumns = hashDistributionInfo.getDistributionColumns(); List<Integer> hashDistributeColumns = new ArrayList<>(); for (Column distributedColumn : distributedColumns) { hashDistributeColumns.add(columnMetaToColRefMap.get(distributedColumn).getId()); } final HashDistributionDesc hashDistributionDesc = new 
HashDistributionDesc(hashDistributeColumns, HashDistributionDesc.SourceType.LOCAL); distributionSpec = DistributionSpec.createHashDistributionSpec(hashDistributionDesc); } else if (distributionInfo.getType() == DistributionInfoType.RANDOM) { distributionSpec = DistributionSpec.createAnyDistributionSpec(); } return distributionSpec; } }
convert TIME, FIXED,BINARY to VARBINARY?
public static Type fromIcebergType(org.apache.iceberg.types.Type icebergType) { if (icebergType == null) { return Type.NULL; } PrimitiveType primitiveType; switch (icebergType.typeId()) { case BOOLEAN: primitiveType = PrimitiveType.BOOLEAN; break; case INTEGER: primitiveType = PrimitiveType.INT; break; case LONG: primitiveType = PrimitiveType.BIGINT; break; case FLOAT: primitiveType = PrimitiveType.FLOAT; break; case DOUBLE: primitiveType = PrimitiveType.DOUBLE; break; case DATE: primitiveType = PrimitiveType.DATE; break; case TIMESTAMP: primitiveType = PrimitiveType.DATETIME; break; case STRING: case UUID: return ScalarType.createDefaultExternalTableString(); case DECIMAL: int precision = ((Types.DecimalType) icebergType).precision(); int scale = ((Types.DecimalType) icebergType).scale(); return ScalarType.createUnifiedDecimalType(precision, scale); case LIST: Type type = convertToArrayTypeForIceberg(icebergType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case MAP: Type mapType = convertToMapTypeForIceberg(icebergType); if (mapType.isMapType()) { return mapType; } else { return Type.UNKNOWN_TYPE; } case STRUCT: List<Types.NestedField> fields = icebergType.asStructType().fields(); Preconditions.checkArgument(fields.size() > 0); ArrayList<StructField> structFields = new ArrayList<>(fields.size()); for (Types.NestedField field : fields) { String fieldName = field.name(); Type fieldType = fromIcebergType(field.type()); if (fieldType.isUnknown()) { return Type.UNKNOWN_TYPE; } structFields.add(new StructField(fieldName, fieldType)); } return new StructType(structFields); case TIME: case FIXED: case BINARY: return Type.VARBINARY; default: primitiveType = PrimitiveType.UNKNOWN_TYPE; } return ScalarType.createType(primitiveType); }
case FIXED:
public static Type fromIcebergType(org.apache.iceberg.types.Type icebergType) { if (icebergType == null) { return Type.NULL; } PrimitiveType primitiveType; switch (icebergType.typeId()) { case BOOLEAN: primitiveType = PrimitiveType.BOOLEAN; break; case INTEGER: primitiveType = PrimitiveType.INT; break; case LONG: primitiveType = PrimitiveType.BIGINT; break; case FLOAT: primitiveType = PrimitiveType.FLOAT; break; case DOUBLE: primitiveType = PrimitiveType.DOUBLE; break; case DATE: primitiveType = PrimitiveType.DATE; break; case TIMESTAMP: primitiveType = PrimitiveType.DATETIME; break; case STRING: case UUID: return ScalarType.createDefaultExternalTableString(); case DECIMAL: int precision = ((Types.DecimalType) icebergType).precision(); int scale = ((Types.DecimalType) icebergType).scale(); return ScalarType.createUnifiedDecimalType(precision, scale); case LIST: Type type = convertToArrayTypeForIceberg(icebergType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case MAP: Type mapType = convertToMapTypeForIceberg(icebergType); if (mapType.isMapType()) { return mapType; } else { return Type.UNKNOWN_TYPE; } case STRUCT: List<Types.NestedField> fields = icebergType.asStructType().fields(); Preconditions.checkArgument(fields.size() > 0); ArrayList<StructField> structFields = new ArrayList<>(fields.size()); for (Types.NestedField field : fields) { String fieldName = field.name(); Type fieldType = fromIcebergType(field.type()); if (fieldType.isUnknown()) { return Type.UNKNOWN_TYPE; } structFields.add(new StructField(fieldName, fieldType)); } return new StructType(structFields); case BINARY: return Type.VARBINARY; case TIME: case FIXED: default: primitiveType = PrimitiveType.UNKNOWN_TYPE; } return ScalarType.createType(primitiveType); }
class ColumnTypeConverter { public static final String DECIMAL_PATTERN = "^decimal\\((\\d+), *(\\d+)\\)"; public static final String COMPLEX_PATTERN = "([0-9a-z<>(),:_ ]+)"; public static final String ARRAY_PATTERN = "^array<" + COMPLEX_PATTERN + ">"; public static final String MAP_PATTERN = "^map<" + COMPLEX_PATTERN + ">"; public static final String STRUCT_PATTERN = "^struct<" + COMPLEX_PATTERN + ">"; public static final String CHAR_PATTERN = "^char\\(([0-9]+)\\)"; public static final String VARCHAR_PATTERN = "^varchar\\(([0-9,-1]+)\\)"; protected static final List<String> HIVE_UNSUPPORTED_TYPES = Arrays.asList("BINARY", "UNIONTYPE"); public static Type fromHiveType(String hiveType) { String typeUpperCase = getTypeKeyword(hiveType).toUpperCase(); PrimitiveType primitiveType; switch (typeUpperCase) { case "TINYINT": primitiveType = PrimitiveType.TINYINT; break; case "SMALLINT": primitiveType = PrimitiveType.SMALLINT; break; case "INT": case "INTEGER": primitiveType = PrimitiveType.INT; break; case "BIGINT": primitiveType = PrimitiveType.BIGINT; break; case "FLOAT": primitiveType = PrimitiveType.FLOAT; break; case "DOUBLE": case "DOUBLE PRECISION": primitiveType = PrimitiveType.DOUBLE; break; case "DECIMAL": case "NUMERIC": primitiveType = PrimitiveType.DECIMAL32; break; case "TIMESTAMP": primitiveType = PrimitiveType.DATETIME; break; case "DATE": primitiveType = PrimitiveType.DATE; break; case "STRING": return ScalarType.createDefaultExternalTableString(); case "VARCHAR": return ScalarType.createVarcharType(getVarcharLength(hiveType)); case "CHAR": return ScalarType.createCharType(getCharLength(hiveType)); case "BOOLEAN": primitiveType = PrimitiveType.BOOLEAN; break; case "ARRAY": Type type = fromHiveTypeToArrayType(hiveType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case "MAP": Type mapType = fromHiveTypeToMapType(hiveType); if (mapType.isMapType()) { return mapType; } else { return Type.UNKNOWN_TYPE; } case "STRUCT": Type 
structType = fromHiveTypeToStructType(hiveType); if (structType.isStructType()) { return structType; } else { return Type.UNKNOWN_TYPE; } default: primitiveType = PrimitiveType.UNKNOWN_TYPE; break; } if (primitiveType != PrimitiveType.DECIMAL32) { return ScalarType.createType(primitiveType); } else { int[] parts = getPrecisionAndScale(hiveType); return ScalarType.createUnifiedDecimalType(parts[0], parts[1]); } } public static Type fromHudiType(Schema avroSchema) { Schema.Type columnType = avroSchema.getType(); LogicalType logicalType = avroSchema.getLogicalType(); PrimitiveType primitiveType = null; boolean isConvertedFailed = false; switch (columnType) { case BOOLEAN: primitiveType = PrimitiveType.BOOLEAN; break; case INT: if (logicalType instanceof LogicalTypes.Date) { primitiveType = PrimitiveType.DATE; } else if (logicalType instanceof LogicalTypes.TimeMillis) { primitiveType = PrimitiveType.TIME; } else { primitiveType = PrimitiveType.INT; } break; case LONG: if (logicalType instanceof LogicalTypes.TimeMicros) { primitiveType = PrimitiveType.TIME; } else if (logicalType instanceof LogicalTypes.TimestampMillis || logicalType instanceof LogicalTypes.TimestampMicros) { primitiveType = PrimitiveType.DATETIME; } else { primitiveType = PrimitiveType.BIGINT; } break; case FLOAT: primitiveType = PrimitiveType.FLOAT; break; case DOUBLE: primitiveType = PrimitiveType.DOUBLE; break; case STRING: return ScalarType.createDefaultExternalTableString(); case ARRAY: Type type = new ArrayType(fromHudiType(avroSchema.getElementType())); if (type.isArrayType()) { return type; } else { isConvertedFailed = true; break; } case FIXED: case BYTES: if (logicalType instanceof LogicalTypes.Decimal) { int precision = ((LogicalTypes.Decimal) logicalType).getPrecision(); int scale = ((LogicalTypes.Decimal) logicalType).getScale(); return ScalarType.createUnifiedDecimalType(precision, scale); } else { primitiveType = PrimitiveType.VARCHAR; break; } case RECORD: List<Schema.Field> fields = 
avroSchema.getFields(); Preconditions.checkArgument(fields.size() > 0); ArrayList<StructField> structFields = new ArrayList<>(fields.size()); for (Schema.Field field : fields) { String fieldName = field.name(); Type fieldType = fromHudiType(field.schema()); if (fieldType.isUnknown()) { isConvertedFailed = true; break; } structFields.add(new StructField(fieldName, fieldType)); } if (!isConvertedFailed) { return new StructType(structFields); } case MAP: Schema value = avroSchema.getValueType(); Type valueType = fromHudiType(value); if (valueType.isUnknown()) { isConvertedFailed = true; break; } if (!isConvertedFailed) { return new MapType(ScalarType.createDefaultExternalTableString(), valueType); } case UNION: List<Schema> nonNullMembers = avroSchema.getTypes().stream() .filter(schema -> !Schema.Type.NULL.equals(schema.getType())) .collect(Collectors.toList()); if (nonNullMembers.size() == 1) { return fromHudiType(nonNullMembers.get(0)); } else { isConvertedFailed = true; break; } case ENUM: default: isConvertedFailed = true; break; } if (isConvertedFailed) { primitiveType = PrimitiveType.UNKNOWN_TYPE; } return ScalarType.createType(primitiveType); } public static String fromHudiTypeToHiveTypeString(Schema avroSchema) { Schema.Type columnType = avroSchema.getType(); LogicalType logicalType = avroSchema.getLogicalType(); switch (columnType) { case BOOLEAN: return "boolean"; case INT: if (logicalType instanceof LogicalTypes.Date) { return "date"; } else if (logicalType instanceof LogicalTypes.TimeMillis) { throw new StarRocksConnectorException("Unsupported hudi {} type of column {}", logicalType.getName(), avroSchema.getName()); } else { return "int"; } case LONG: if (logicalType instanceof LogicalTypes.TimeMicros) { throw new StarRocksConnectorException("Unsupported hudi {} type of column {}", logicalType.getName(), avroSchema.getName()); } else if (logicalType instanceof LogicalTypes.TimestampMillis || logicalType instanceof LogicalTypes.TimestampMicros) { return 
logicalType.getName(); } else { return "bigint"; } case FLOAT: return "float"; case DOUBLE: return "double"; case STRING: return "string"; case ARRAY: String elementType = fromHudiTypeToHiveTypeString(avroSchema.getElementType()); return String.format("array<%s>", elementType); case FIXED: case BYTES: if (logicalType instanceof LogicalTypes.Decimal) { int precision = ((LogicalTypes.Decimal) logicalType).getPrecision(); int scale = ((LogicalTypes.Decimal) logicalType).getScale(); return String.format("decimal(%s,%s)", precision, scale); } else { return "string"; } case RECORD: List<Schema.Field> fields = avroSchema.getFields(); Preconditions.checkArgument(fields.size() > 0); String nameToType = fields.stream() .map(f -> String.format("%s:%s", f.name(), fromHudiTypeToHiveTypeString(f.schema()))) .collect(Collectors.joining(",")); return String.format("struct<%s>", nameToType); case MAP: Schema value = avroSchema.getValueType(); String valueType = fromHudiTypeToHiveTypeString(value); return String.format("map<%s,%s>", "string", valueType); case UNION: List<Schema> nonNullMembers = avroSchema.getTypes().stream() .filter(schema -> !Schema.Type.NULL.equals(schema.getType())) .collect(Collectors.toList()); return fromHudiTypeToHiveTypeString(nonNullMembers.get(0)); case ENUM: default: throw new StarRocksConnectorException("Unsupported hudi {} type of column {}", avroSchema.getType().getName(), avroSchema.getName()); } } public static Type fromDeltaLakeType(DataType dataType) { if (dataType == null) { return Type.NULL; } PrimitiveType primitiveType; DeltaDataType deltaDataType = DeltaDataType.instanceFrom(dataType.getClass()); switch (deltaDataType) { case BOOLEAN: primitiveType = PrimitiveType.BOOLEAN; break; case BYTE: case TINYINT: primitiveType = PrimitiveType.TINYINT; break; case SMALLINT: primitiveType = PrimitiveType.SMALLINT; break; case INTEGER: primitiveType = PrimitiveType.INT; break; case LONG: primitiveType = PrimitiveType.BIGINT; break; case FLOAT: 
primitiveType = PrimitiveType.FLOAT; break; case DOUBLE: primitiveType = PrimitiveType.DOUBLE; break; case DATE: primitiveType = PrimitiveType.DATE; break; case TIMESTAMP: primitiveType = PrimitiveType.DATETIME; break; case STRING: return ScalarType.createDefaultExternalTableString(); case DECIMAL: int precision = ((io.delta.standalone.types.DecimalType) dataType).getPrecision(); int scale = ((io.delta.standalone.types.DecimalType) dataType).getScale(); return ScalarType.createUnifiedDecimalType(precision, scale); case ARRAY: Type type = convertToArrayType((io.delta.standalone.types.ArrayType) dataType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case NULL: primitiveType = PrimitiveType.NULL_TYPE; break; case BINARY: case MAP: case STRUCT: default: primitiveType = PrimitiveType.UNKNOWN_TYPE; } return ScalarType.createType(primitiveType); } private static ArrayType convertToArrayTypeForIceberg(org.apache.iceberg.types.Type icebergType) { return new ArrayType(fromIcebergType(icebergType.asNestedType().asListType().elementType())); } private static Type convertToMapTypeForIceberg(org.apache.iceberg.types.Type icebergType) { Type keyType = fromIcebergType(icebergType.asMapType().keyType()); if (keyType.isComplexType() || keyType.isUnknown()) { return Type.UNKNOWN_TYPE; } Type valueType = fromIcebergType(icebergType.asMapType().valueType()); if (valueType.isUnknown()) { return Type.UNKNOWN_TYPE; } return new MapType(keyType, valueType); } private static ArrayType convertToArrayType(io.delta.standalone.types.ArrayType arrayType) { return new ArrayType(fromDeltaLakeType(arrayType.getElementType())); } public static String getTypeKeyword(String type) { String keyword = type; int parenthesesIndex; if ((parenthesesIndex = keyword.indexOf('<')) >= 0) { keyword = keyword.substring(0, parenthesesIndex).trim(); } else if ((parenthesesIndex = keyword.indexOf('(')) >= 0) { keyword = keyword.substring(0, parenthesesIndex).trim(); } return keyword; } 
public static int[] getPrecisionAndScale(String typeStr) { Matcher matcher = Pattern.compile(DECIMAL_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { return new int[] {Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2))}; } throw new StarRocksConnectorException("Failed to get precision and scale at " + typeStr); } public static Type fromHiveTypeToArrayType(String typeStr) { if (HIVE_UNSUPPORTED_TYPES.stream().anyMatch(typeStr.toUpperCase()::contains)) { return Type.UNKNOWN_TYPE; } Matcher matcher = Pattern.compile(ARRAY_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); Type itemType; if (matcher.find()) { if (fromHiveTypeToArrayType(matcher.group(1)).equals(Type.UNKNOWN_TYPE)) { itemType = Type.UNKNOWN_TYPE; } else { itemType = new ArrayType(fromHiveTypeToArrayType(matcher.group(1))); } } else { itemType = fromHiveType(typeStr); } return itemType; } public static Type fromHiveTypeToStructType(String typeStr) { Matcher matcher = Pattern.compile(STRUCT_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { String str = matcher.group(1); String[] subfields = splitByFirstLevel(str, ','); ArrayList<StructField> structFields = new ArrayList<>(subfields.length); for (String subfield : subfields) { String[] structField = splitByFirstLevel(subfield, ':'); if (structField.length != 2) { throw new StarRocksConnectorException("Error Struct Type" + typeStr); } structFields.add(new StructField(structField[0], fromHiveType(structField[1]))); } return new StructType(structFields); } else { throw new StarRocksConnectorException("Failed to get StructType at " + typeStr); } } public static String[] splitByFirstLevel(String str, char splitter) { int level = 0; int start = 0; List<String> list = new LinkedList<>(); char[] cStr = str.toCharArray(); for (int i = 0; i < cStr.length; i++) { char c = cStr[i]; if (c == '<' || c == '(') { level++; } else if (c == '>' || c == ')') { level--; } else if (c == splitter && level == 
0) { list.add(str.substring(start, i).trim()); start = i + 1; } } if (start < cStr.length) { list.add(str.substring(start, cStr.length).trim()); } return list.toArray(new String[] {}); } public static String[] getKeyValueStr(String typeStr) { Matcher matcher = Pattern.compile(MAP_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { String kvStr = matcher.group(1); String[] kvs = splitByFirstLevel(kvStr, ','); if (kvs.length != 2) { throw new StarRocksConnectorException("Error Map Type" + typeStr); } return new String[] {kvs[0], kvs[1]}; } else { throw new StarRocksConnectorException("Failed to get MapType at " + typeStr); } } public static Type fromHiveTypeToMapType(String typeStr) { String[] kv = getKeyValueStr(typeStr); return new MapType(fromHiveType(kv[0]), fromHiveType(kv[1])); } public static int getCharLength(String typeStr) { Matcher matcher = Pattern.compile(CHAR_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { return Integer.parseInt(matcher.group(1)); } throw new StarRocksConnectorException("Failed to get char length at " + typeStr); } public static int getVarcharLength(String typeStr) { Matcher matcher = Pattern.compile(VARCHAR_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { return Integer.parseInt(matcher.group(1)); } throw new StarRocksConnectorException("Failed to get varchar length at " + typeStr); } public static boolean validateColumnType(Type type, Type otherType) { if (type == null || otherType == null) { return false; } if (type == Type.UNKNOWN_TYPE || otherType == Type.UNKNOWN_TYPE) { return false; } if (type.isArrayType()) { if (otherType.isArrayType()) { return validateColumnType(((ArrayType) type).getItemType(), ((ArrayType) otherType).getItemType()); } else { return false; } } if (type.isMapType()) { if (otherType.isMapType()) { return validateColumnType(((MapType) type).getKeyType(), ((MapType) otherType).getKeyType()) && validateColumnType(((MapType) 
type).getValueType(), ((MapType) otherType).getValueType()); } else { return false; } } if (type.isStructType()) { if (otherType.isStructType()) { StructType structType = (StructType) type; StructType otherStructType = (StructType) otherType; for (int i = 0; i < structType.getFields().size(); i++) { if (!validateColumnType( structType.getField(i).getType(), otherStructType.getField(i).getType())) { return false; } } return true; } else { return false; } } PrimitiveType primitiveType = type.getPrimitiveType(); PrimitiveType otherPrimitiveType = otherType.getPrimitiveType(); switch (primitiveType) { case TINYINT: case SMALLINT: case INT: case BIGINT: case FLOAT: case DOUBLE: case DATETIME: case DATE: case BOOLEAN: case CHAR: return primitiveType == otherPrimitiveType; case VARCHAR: return otherPrimitiveType == PrimitiveType.CHAR || otherPrimitiveType == PrimitiveType.VARCHAR; case DECIMALV2: case DECIMAL32: case DECIMAL64: case DECIMAL128: return otherPrimitiveType.isDecimalOfAnyVersion(); default: return false; } } public static boolean columnEquals(Column base, Column other) { if (base == other) { return true; } if (!base.getName().equalsIgnoreCase(other.getName())) { return false; } if (!base.getType().equals(other.getType())) { return false; } return true; } }
class ColumnTypeConverter { public static final String DECIMAL_PATTERN = "^decimal\\((\\d+), *(\\d+)\\)"; public static final String COMPLEX_PATTERN = "([0-9a-z<>(),:_ ]+)"; public static final String ARRAY_PATTERN = "^array<" + COMPLEX_PATTERN + ">"; public static final String MAP_PATTERN = "^map<" + COMPLEX_PATTERN + ">"; public static final String STRUCT_PATTERN = "^struct<" + COMPLEX_PATTERN + ">"; public static final String CHAR_PATTERN = "^char\\(([0-9]+)\\)"; public static final String VARCHAR_PATTERN = "^varchar\\(([0-9,-1]+)\\)"; protected static final List<String> HIVE_UNSUPPORTED_TYPES = Arrays.asList("BINARY", "UNIONTYPE"); public static Type fromHiveType(String hiveType) { String typeUpperCase = getTypeKeyword(hiveType).toUpperCase(); PrimitiveType primitiveType; switch (typeUpperCase) { case "TINYINT": primitiveType = PrimitiveType.TINYINT; break; case "SMALLINT": primitiveType = PrimitiveType.SMALLINT; break; case "INT": case "INTEGER": primitiveType = PrimitiveType.INT; break; case "BIGINT": primitiveType = PrimitiveType.BIGINT; break; case "FLOAT": primitiveType = PrimitiveType.FLOAT; break; case "DOUBLE": case "DOUBLE PRECISION": primitiveType = PrimitiveType.DOUBLE; break; case "DECIMAL": case "NUMERIC": primitiveType = PrimitiveType.DECIMAL32; break; case "TIMESTAMP": primitiveType = PrimitiveType.DATETIME; break; case "DATE": primitiveType = PrimitiveType.DATE; break; case "STRING": return ScalarType.createDefaultExternalTableString(); case "VARCHAR": return ScalarType.createVarcharType(getVarcharLength(hiveType)); case "CHAR": return ScalarType.createCharType(getCharLength(hiveType)); case "BOOLEAN": primitiveType = PrimitiveType.BOOLEAN; break; case "ARRAY": Type type = fromHiveTypeToArrayType(hiveType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case "MAP": Type mapType = fromHiveTypeToMapType(hiveType); if (mapType.isMapType()) { return mapType; } else { return Type.UNKNOWN_TYPE; } case "STRUCT": Type 
structType = fromHiveTypeToStructType(hiveType); if (structType.isStructType()) { return structType; } else { return Type.UNKNOWN_TYPE; } default: primitiveType = PrimitiveType.UNKNOWN_TYPE; break; } if (primitiveType != PrimitiveType.DECIMAL32) { return ScalarType.createType(primitiveType); } else { int[] parts = getPrecisionAndScale(hiveType); return ScalarType.createUnifiedDecimalType(parts[0], parts[1]); } } public static Type fromHudiType(Schema avroSchema) { Schema.Type columnType = avroSchema.getType(); LogicalType logicalType = avroSchema.getLogicalType(); PrimitiveType primitiveType = null; boolean isConvertedFailed = false; switch (columnType) { case BOOLEAN: primitiveType = PrimitiveType.BOOLEAN; break; case INT: if (logicalType instanceof LogicalTypes.Date) { primitiveType = PrimitiveType.DATE; } else if (logicalType instanceof LogicalTypes.TimeMillis) { primitiveType = PrimitiveType.TIME; } else { primitiveType = PrimitiveType.INT; } break; case LONG: if (logicalType instanceof LogicalTypes.TimeMicros) { primitiveType = PrimitiveType.TIME; } else if (logicalType instanceof LogicalTypes.TimestampMillis || logicalType instanceof LogicalTypes.TimestampMicros) { primitiveType = PrimitiveType.DATETIME; } else { primitiveType = PrimitiveType.BIGINT; } break; case FLOAT: primitiveType = PrimitiveType.FLOAT; break; case DOUBLE: primitiveType = PrimitiveType.DOUBLE; break; case STRING: return ScalarType.createDefaultExternalTableString(); case ARRAY: Type type = new ArrayType(fromHudiType(avroSchema.getElementType())); if (type.isArrayType()) { return type; } else { isConvertedFailed = true; break; } case FIXED: case BYTES: if (logicalType instanceof LogicalTypes.Decimal) { int precision = ((LogicalTypes.Decimal) logicalType).getPrecision(); int scale = ((LogicalTypes.Decimal) logicalType).getScale(); return ScalarType.createUnifiedDecimalType(precision, scale); } else { primitiveType = PrimitiveType.VARCHAR; break; } case RECORD: List<Schema.Field> fields = 
avroSchema.getFields(); Preconditions.checkArgument(fields.size() > 0); ArrayList<StructField> structFields = new ArrayList<>(fields.size()); for (Schema.Field field : fields) { String fieldName = field.name(); Type fieldType = fromHudiType(field.schema()); if (fieldType.isUnknown()) { isConvertedFailed = true; break; } structFields.add(new StructField(fieldName, fieldType)); } if (!isConvertedFailed) { return new StructType(structFields); } case MAP: Schema value = avroSchema.getValueType(); Type valueType = fromHudiType(value); if (valueType.isUnknown()) { isConvertedFailed = true; break; } if (!isConvertedFailed) { return new MapType(ScalarType.createDefaultExternalTableString(), valueType); } case UNION: List<Schema> nonNullMembers = avroSchema.getTypes().stream() .filter(schema -> !Schema.Type.NULL.equals(schema.getType())) .collect(Collectors.toList()); if (nonNullMembers.size() == 1) { return fromHudiType(nonNullMembers.get(0)); } else { isConvertedFailed = true; break; } case ENUM: default: isConvertedFailed = true; break; } if (isConvertedFailed) { primitiveType = PrimitiveType.UNKNOWN_TYPE; } return ScalarType.createType(primitiveType); } public static String fromHudiTypeToHiveTypeString(Schema avroSchema) { Schema.Type columnType = avroSchema.getType(); LogicalType logicalType = avroSchema.getLogicalType(); switch (columnType) { case BOOLEAN: return "boolean"; case INT: if (logicalType instanceof LogicalTypes.Date) { return "date"; } else if (logicalType instanceof LogicalTypes.TimeMillis) { throw new StarRocksConnectorException("Unsupported hudi {} type of column {}", logicalType.getName(), avroSchema.getName()); } else { return "int"; } case LONG: if (logicalType instanceof LogicalTypes.TimeMicros) { throw new StarRocksConnectorException("Unsupported hudi {} type of column {}", logicalType.getName(), avroSchema.getName()); } else if (logicalType instanceof LogicalTypes.TimestampMillis || logicalType instanceof LogicalTypes.TimestampMicros) { return 
logicalType.getName(); } else { return "bigint"; } case FLOAT: return "float"; case DOUBLE: return "double"; case STRING: return "string"; case ARRAY: String elementType = fromHudiTypeToHiveTypeString(avroSchema.getElementType()); return String.format("array<%s>", elementType); case FIXED: case BYTES: if (logicalType instanceof LogicalTypes.Decimal) { int precision = ((LogicalTypes.Decimal) logicalType).getPrecision(); int scale = ((LogicalTypes.Decimal) logicalType).getScale(); return String.format("decimal(%s,%s)", precision, scale); } else { return "string"; } case RECORD: List<Schema.Field> fields = avroSchema.getFields(); Preconditions.checkArgument(fields.size() > 0); String nameToType = fields.stream() .map(f -> String.format("%s:%s", f.name(), fromHudiTypeToHiveTypeString(f.schema()))) .collect(Collectors.joining(",")); return String.format("struct<%s>", nameToType); case MAP: Schema value = avroSchema.getValueType(); String valueType = fromHudiTypeToHiveTypeString(value); return String.format("map<%s,%s>", "string", valueType); case UNION: List<Schema> nonNullMembers = avroSchema.getTypes().stream() .filter(schema -> !Schema.Type.NULL.equals(schema.getType())) .collect(Collectors.toList()); return fromHudiTypeToHiveTypeString(nonNullMembers.get(0)); case ENUM: default: throw new StarRocksConnectorException("Unsupported hudi {} type of column {}", avroSchema.getType().getName(), avroSchema.getName()); } } public static Type fromDeltaLakeType(DataType dataType) { if (dataType == null) { return Type.NULL; } PrimitiveType primitiveType; DeltaDataType deltaDataType = DeltaDataType.instanceFrom(dataType.getClass()); switch (deltaDataType) { case BOOLEAN: primitiveType = PrimitiveType.BOOLEAN; break; case BYTE: case TINYINT: primitiveType = PrimitiveType.TINYINT; break; case SMALLINT: primitiveType = PrimitiveType.SMALLINT; break; case INTEGER: primitiveType = PrimitiveType.INT; break; case LONG: primitiveType = PrimitiveType.BIGINT; break; case FLOAT: 
primitiveType = PrimitiveType.FLOAT; break; case DOUBLE: primitiveType = PrimitiveType.DOUBLE; break; case DATE: primitiveType = PrimitiveType.DATE; break; case TIMESTAMP: primitiveType = PrimitiveType.DATETIME; break; case STRING: return ScalarType.createDefaultExternalTableString(); case DECIMAL: int precision = ((io.delta.standalone.types.DecimalType) dataType).getPrecision(); int scale = ((io.delta.standalone.types.DecimalType) dataType).getScale(); return ScalarType.createUnifiedDecimalType(precision, scale); case ARRAY: Type type = convertToArrayTypeForDeltaLake((io.delta.standalone.types.ArrayType) dataType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case NULL: primitiveType = PrimitiveType.NULL_TYPE; break; case BINARY: case MAP: case STRUCT: default: primitiveType = PrimitiveType.UNKNOWN_TYPE; } return ScalarType.createType(primitiveType); } private static ArrayType convertToArrayTypeForIceberg(org.apache.iceberg.types.Type icebergType) { return new ArrayType(fromIcebergType(icebergType.asNestedType().asListType().elementType())); } private static Type convertToMapTypeForIceberg(org.apache.iceberg.types.Type icebergType) { Type keyType = fromIcebergType(icebergType.asMapType().keyType()); if (keyType.isComplexType() || keyType.isUnknown()) { return Type.UNKNOWN_TYPE; } Type valueType = fromIcebergType(icebergType.asMapType().valueType()); if (valueType.isUnknown()) { return Type.UNKNOWN_TYPE; } return new MapType(keyType, valueType); } private static ArrayType convertToArrayTypeForDeltaLake(io.delta.standalone.types.ArrayType arrayType) { return new ArrayType(fromDeltaLakeType(arrayType.getElementType())); } public static String getTypeKeyword(String type) { String keyword = type; int parenthesesIndex; if ((parenthesesIndex = keyword.indexOf('<')) >= 0) { keyword = keyword.substring(0, parenthesesIndex).trim(); } else if ((parenthesesIndex = keyword.indexOf('(')) >= 0) { keyword = keyword.substring(0, 
parenthesesIndex).trim(); } return keyword; } public static int[] getPrecisionAndScale(String typeStr) { Matcher matcher = Pattern.compile(DECIMAL_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { return new int[] {Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2))}; } throw new StarRocksConnectorException("Failed to get precision and scale at " + typeStr); } public static Type fromHiveTypeToArrayType(String typeStr) { if (HIVE_UNSUPPORTED_TYPES.stream().anyMatch(typeStr.toUpperCase()::contains)) { return Type.UNKNOWN_TYPE; } Matcher matcher = Pattern.compile(ARRAY_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); Type itemType; if (matcher.find()) { if (fromHiveTypeToArrayType(matcher.group(1)).equals(Type.UNKNOWN_TYPE)) { itemType = Type.UNKNOWN_TYPE; } else { itemType = new ArrayType(fromHiveTypeToArrayType(matcher.group(1))); } } else { itemType = fromHiveType(typeStr); } return itemType; } public static Type fromHiveTypeToStructType(String typeStr) { Matcher matcher = Pattern.compile(STRUCT_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { String str = matcher.group(1); String[] subfields = splitByFirstLevel(str, ','); ArrayList<StructField> structFields = new ArrayList<>(subfields.length); for (String subfield : subfields) { String[] structField = splitByFirstLevel(subfield, ':'); if (structField.length != 2) { throw new StarRocksConnectorException("Error Struct Type" + typeStr); } structFields.add(new StructField(structField[0], fromHiveType(structField[1]))); } return new StructType(structFields); } else { throw new StarRocksConnectorException("Failed to get StructType at " + typeStr); } } public static String[] splitByFirstLevel(String str, char splitter) { int level = 0; int start = 0; List<String> list = new LinkedList<>(); char[] cStr = str.toCharArray(); for (int i = 0; i < cStr.length; i++) { char c = cStr[i]; if (c == '<' || c == '(') { level++; } else if (c == '>' || c == ')') { 
level--; } else if (c == splitter && level == 0) { list.add(str.substring(start, i).trim()); start = i + 1; } } if (start < cStr.length) { list.add(str.substring(start, cStr.length).trim()); } return list.toArray(new String[] {}); } public static String[] getKeyValueStr(String typeStr) { Matcher matcher = Pattern.compile(MAP_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { String kvStr = matcher.group(1); String[] kvs = splitByFirstLevel(kvStr, ','); if (kvs.length != 2) { throw new StarRocksConnectorException("Error Map Type" + typeStr); } return new String[] {kvs[0], kvs[1]}; } else { throw new StarRocksConnectorException("Failed to get MapType at " + typeStr); } } public static Type fromHiveTypeToMapType(String typeStr) { String[] kv = getKeyValueStr(typeStr); return new MapType(fromHiveType(kv[0]), fromHiveType(kv[1])); } public static int getCharLength(String typeStr) { Matcher matcher = Pattern.compile(CHAR_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { return Integer.parseInt(matcher.group(1)); } throw new StarRocksConnectorException("Failed to get char length at " + typeStr); } public static int getVarcharLength(String typeStr) { Matcher matcher = Pattern.compile(VARCHAR_PATTERN).matcher(typeStr.toLowerCase(Locale.ROOT)); if (matcher.find()) { return Integer.parseInt(matcher.group(1)); } throw new StarRocksConnectorException("Failed to get varchar length at " + typeStr); } public static boolean validateColumnType(Type type, Type otherType) { if (type == null || otherType == null) { return false; } if (type == Type.UNKNOWN_TYPE || otherType == Type.UNKNOWN_TYPE) { return false; } if (type.isArrayType()) { if (otherType.isArrayType()) { return validateColumnType(((ArrayType) type).getItemType(), ((ArrayType) otherType).getItemType()); } else { return false; } } if (type.isMapType()) { if (otherType.isMapType()) { return validateColumnType(((MapType) type).getKeyType(), ((MapType) otherType).getKeyType()) && 
validateColumnType(((MapType) type).getValueType(), ((MapType) otherType).getValueType()); } else { return false; } } if (type.isStructType()) { if (otherType.isStructType()) { StructType structType = (StructType) type; StructType otherStructType = (StructType) otherType; for (int i = 0; i < structType.getFields().size(); i++) { if (!validateColumnType( structType.getField(i).getType(), otherStructType.getField(i).getType())) { return false; } } return true; } else { return false; } } PrimitiveType primitiveType = type.getPrimitiveType(); PrimitiveType otherPrimitiveType = otherType.getPrimitiveType(); switch (primitiveType) { case TINYINT: case SMALLINT: case INT: case BIGINT: case FLOAT: case DOUBLE: case DATETIME: case DATE: case BOOLEAN: case CHAR: return primitiveType == otherPrimitiveType; case VARCHAR: return otherPrimitiveType == PrimitiveType.CHAR || otherPrimitiveType == PrimitiveType.VARCHAR; case DECIMALV2: case DECIMAL32: case DECIMAL64: case DECIMAL128: return otherPrimitiveType.isDecimalOfAnyVersion(); default: return false; } } public static boolean columnEquals(Column base, Column other) { if (base == other) { return true; } if (!base.getName().equalsIgnoreCase(other.getName())) { return false; } if (!base.getType().equals(other.getType())) { return false; } return true; } }
Can we move this line into the open method?
public void emitResults() throws IOException { byte[] udfResult; while ((udfResult = userDefinedFunctionResultQueue.poll()) != null) { bais.setBuffer(udfResult, 0, udfResult.length); reader.loadNextBatch(); VectorSchemaRoot root = reader.getVectorSchemaRoot(); if (arrowReader == null) { arrowReader = ArrowUtils.createBaseRowArrowReader(root); } for (int i = 0; i < root.getRowCount(); i++) { BaseRow input = forwardedInputQueue.poll(); reuseJoinedRow.setHeader(input.getHeader()); baseRowWrapper.collect(reuseJoinedRow.replace(input, arrowReader.read(i))); } } }
arrowReader = ArrowUtils.createBaseRowArrowReader(root);
public void emitResults() throws IOException { byte[] udfResult; while ((udfResult = userDefinedFunctionResultQueue.poll()) != null) { bais.setBuffer(udfResult, 0, udfResult.length); reader.loadNextBatch(); VectorSchemaRoot root = reader.getVectorSchemaRoot(); if (arrowReader == null) { arrowReader = ArrowUtils.createBaseRowArrowReader(root); } for (int i = 0; i < root.getRowCount(); i++) { BaseRow input = forwardedInputQueue.poll(); reuseJoinedRow.setHeader(input.getHeader()); baseRowWrapper.collect(reuseJoinedRow.replace(input, arrowReader.read(i))); } } }
class BaseRowArrowPythonScalarFunctionOperator extends AbstractBaseRowPythonScalarFunctionOperator { private static final long serialVersionUID = 1L; /** * Allocator which is used for byte buffer allocation. */ private transient BufferAllocator allocator; /** * Reader which is responsible for deserialize the Arrow format data to the Flink rows. */ private transient ArrowReader<BaseRow> arrowReader; /** * Reader which is responsible for convert the execution result from * byte array to arrow format. */ private transient ArrowStreamReader reader; /** * The JoinedRow reused holding the execution result. */ private transient JoinedRow reuseJoinedRow; public BaseRowArrowPythonScalarFunctionOperator( Configuration config, PythonFunctionInfo[] scalarFunctions, RowType inputType, RowType outputType, int[] udfInputOffsets, int[] forwardedFields) { super(config, scalarFunctions, inputType, outputType, udfInputOffsets, forwardedFields); } @Override public void open() throws Exception { super.open(); allocator = ArrowUtils.ROOT_ALLOCATOR.newChildAllocator( "reader", 0, Long.MAX_VALUE); reader = new ArrowStreamReader(bais, allocator); reuseJoinedRow = new JoinedRow(); } @Override public void close() throws Exception { try { super.close(); } finally { reader.close(); allocator.close(); } } @Override public PythonFunctionRunner<BaseRow> createPythonFunctionRunner( FnDataReceiver<byte[]> resultReceiver, PythonEnvironmentManager pythonEnvironmentManager) { return new BaseRowArrowPythonScalarFunctionRunner( getRuntimeContext().getTaskName(), resultReceiver, scalarFunctions, pythonEnvironmentManager, userDefinedFunctionInputType, userDefinedFunctionOutputType, getPythonConfig().getMaxArrowBatchSize()); } @Override @SuppressWarnings("ConstantConditions") }
class BaseRowArrowPythonScalarFunctionOperator extends AbstractBaseRowPythonScalarFunctionOperator { private static final long serialVersionUID = 1L; /** * Allocator which is used for byte buffer allocation. */ private transient BufferAllocator allocator; /** * Reader which is responsible for deserialize the Arrow format data to the Flink rows. */ private transient ArrowReader<BaseRow> arrowReader; /** * Reader which is responsible for convert the execution result from * byte array to arrow format. */ private transient ArrowStreamReader reader; public BaseRowArrowPythonScalarFunctionOperator( Configuration config, PythonFunctionInfo[] scalarFunctions, RowType inputType, RowType outputType, int[] udfInputOffsets, int[] forwardedFields) { super(config, scalarFunctions, inputType, outputType, udfInputOffsets, forwardedFields); } @Override public void open() throws Exception { super.open(); allocator = ArrowUtils.ROOT_ALLOCATOR.newChildAllocator("reader", 0, Long.MAX_VALUE); reader = new ArrowStreamReader(bais, allocator); } @Override public void close() throws Exception { try { super.close(); } finally { reader.close(); allocator.close(); } } @Override public PythonFunctionRunner<BaseRow> createPythonFunctionRunner( FnDataReceiver<byte[]> resultReceiver, PythonEnvironmentManager pythonEnvironmentManager) { return new BaseRowArrowPythonScalarFunctionRunner( getRuntimeContext().getTaskName(), resultReceiver, scalarFunctions, pythonEnvironmentManager, userDefinedFunctionInputType, userDefinedFunctionOutputType, getPythonConfig().getMaxArrowBatchSize()); } @Override @SuppressWarnings("ConstantConditions") }
I did another deep search - I found one case in SQL's codegen which I fixed, but can't find anything else.
private static List<TableFieldSchema> toTableFieldSchema(Schema schema) { List<TableFieldSchema> fields = new ArrayList<>(schema.getFieldCount()); for (Field schemaField : schema.getFields()) { FieldType type = schemaField.getType(); TableFieldSchema field = new TableFieldSchema().setName(schemaField.getName()); if (schemaField.getDescription() != null && !"".equals(schemaField.getDescription())) { field.setDescription(schemaField.getDescription()); } if (!schemaField.getType().getNullable()) { field.setMode(Mode.REQUIRED.toString()); } if (type.getTypeName().isCollectionType()) { type = type.getCollectionElementType(); if (type.getTypeName().isCollectionType() || type.getTypeName().isMapType()) { throw new IllegalArgumentException("Array of collection is not supported in BigQuery."); } field.setMode(Mode.REPEATED.toString()); } if (TypeName.ROW == type.getTypeName()) { Schema subType = type.getRowSchema(); field.setFields(toTableFieldSchema(subType)); } if (TypeName.MAP == type.getTypeName()) { throw new IllegalArgumentException("Maps are not supported in BigQuery."); } field.setType(toStandardSQLTypeName(type).toString()); fields.add(field); } return fields; }
if (type.getTypeName().isCollectionType()) {
private static List<TableFieldSchema> toTableFieldSchema(Schema schema) { List<TableFieldSchema> fields = new ArrayList<>(schema.getFieldCount()); for (Field schemaField : schema.getFields()) { FieldType type = schemaField.getType(); TableFieldSchema field = new TableFieldSchema().setName(schemaField.getName()); if (schemaField.getDescription() != null && !"".equals(schemaField.getDescription())) { field.setDescription(schemaField.getDescription()); } if (!schemaField.getType().getNullable()) { field.setMode(Mode.REQUIRED.toString()); } if (type.getTypeName().isCollectionType()) { type = type.getCollectionElementType(); if (type.getTypeName().isCollectionType() || type.getTypeName().isMapType()) { throw new IllegalArgumentException("Array of collection is not supported in BigQuery."); } field.setMode(Mode.REPEATED.toString()); } if (TypeName.ROW == type.getTypeName()) { Schema subType = type.getRowSchema(); field.setFields(toTableFieldSchema(subType)); } if (TypeName.MAP == type.getTypeName()) { throw new IllegalArgumentException("Maps are not supported in BigQuery."); } field.setType(toStandardSQLTypeName(type).toString()); fields.add(field); } return fields; }
class Builder { public abstract Builder setTruncateTimestamps(TruncateTimestamps truncateTimestamps); public abstract ConversionOptions build(); }
class Builder { public abstract Builder setTruncateTimestamps(TruncateTimestamps truncateTimestamps); public abstract ConversionOptions build(); }
Timestamp is the sort key here. As our OrderedListState interface is based on TimestampedValue<T>, the sort key is actually an Instant, but I agree that I should be consistent here by using "sort key". The reason why I put the value encoded bytes before the sort key encoded bytes is that the same order is used in the coder of TimestampedValue<T>. https://github.com/apache/beam/blob/0d46e304f176847e897eef244c50cb29af8c6451/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java#L110 Here, we pack the data into the output stream as if we are calling `TimestampedValue.encode()`. However, we cannot call encode() directly, because we don't want to decode the value and encode it again here. (In my previous attempt, I did try to decode the value and encode it again. It seems to introduce unnecessary overhead. https://github.com/apache/beam/pull/30317/commits/d06e0a5ebe8405334112a2d797bb5c9571b46b9c)
public CompletableFuture<StateResponse> handle(StateRequest.Builder requestBuilder) { assertEquals("", requestBuilder.getId()); requestBuilder.setId(generateId()); StateRequest request = requestBuilder.build(); StateKey key = request.getStateKey(); StateResponse.Builder response; assertNotEquals(RequestCase.REQUEST_NOT_SET, request.getRequestCase()); assertNotEquals(TypeCase.TYPE_NOT_SET, key.getTypeCase()); if (key.getTypeCase() == TypeCase.MULTIMAP_SIDE_INPUT || key.getTypeCase() == TypeCase.RUNNER) { assertEquals(GET, request.getRequestCase()); } if (key.getTypeCase() == TypeCase.MULTIMAP_KEYS_VALUES_SIDE_INPUT && !data.containsKey(key)) { throw new UnsupportedOperationException("No multimap keys values states provided."); } switch (request.getRequestCase()) { case GET: { List<ByteString> byteStrings = data.getOrDefault(request.getStateKey(), Collections.singletonList(ByteString.EMPTY)); int block = 0; if (request.getGet().getContinuationToken().size() > 0) { block = Integer.parseInt(request.getGet().getContinuationToken().toStringUtf8()); } ByteString returnBlock = byteStrings.get(block); ByteString continuationToken = ByteString.EMPTY; if (byteStrings.size() > block + 1) { continuationToken = ByteString.copyFromUtf8(Integer.toString(block + 1)); } response = StateResponse.newBuilder() .setGet( StateGetResponse.newBuilder() .setData(returnBlock) .setContinuationToken(continuationToken)); } break; case CLEAR: data.remove(request.getStateKey()); response = StateResponse.newBuilder().setClear(StateClearResponse.getDefaultInstance()); break; case APPEND: List<ByteString> previousValue = data.computeIfAbsent(request.getStateKey(), (unused) -> new ArrayList<>()); previousValue.add(request.getAppend().getData()); response = StateResponse.newBuilder().setAppend(StateAppendResponse.getDefaultInstance()); break; case ORDERED_LIST_GET: { long start = request.getOrderedListGet().getRange().getStart(); long end = request.getOrderedListGet().getRange().getEnd(); 
KvCoder<Long, Integer> coder = KvCoder.of(VarLongCoder.of(), VarIntCoder.of()); long sortKey = start; int index = 0; if (request.getOrderedListGet().getContinuationToken().size() > 0) { try { KV<Long, Integer> cursor = coder.decode(request.getOrderedListGet().getContinuationToken().newInput()); sortKey = cursor.getKey(); index = cursor.getValue(); } catch (IOException e) { throw new RuntimeException(e); } } ByteString continuationToken; ByteString returnBlock = ByteString.EMPTY; ; try { if (sortKey < start || sortKey >= end) { throw new IndexOutOfBoundsException("sort key out of range"); } NavigableSet<Long> subset = orderedListKeys .getOrDefault(request.getStateKey(), new TreeSet<>()) .subSet(sortKey, true, end, false); Long nextSortKey = subset.first(); StateKey.Builder keyBuilder = request.getStateKey().toBuilder(); keyBuilder.getOrderedListUserStateBuilder().setSortKey(nextSortKey); List<ByteString> byteStrings = data.getOrDefault(keyBuilder.build(), Collections.singletonList(ByteString.EMPTY)); returnBlock = byteStrings.get(index); if (byteStrings.size() > index + 1) { index += 1; } else { nextSortKey = subset.tailSet(nextSortKey, false).first(); index = 0; } ByteStringOutputStream outputStream = new ByteStringOutputStream(); try { KV<Long, Integer> cursor = KV.of(nextSortKey, index); coder.encode(cursor, outputStream); } catch (IOException e) { throw new RuntimeException(e); } continuationToken = outputStream.toByteString(); } catch (NoSuchElementException | IndexOutOfBoundsException e) { continuationToken = ByteString.EMPTY; } response = StateResponse.newBuilder() .setOrderedListGet( OrderedListStateGetResponse.newBuilder() .setData(returnBlock) .setContinuationToken(continuationToken)); } break; case ORDERED_LIST_UPDATE: for (OrderedListRange r : request.getOrderedListUpdate().getDeletesList()) { List<Long> keysToRemove = new ArrayList<>( orderedListKeys .getOrDefault(request.getStateKey(), new TreeSet<>()) .subSet(r.getStart(), true, r.getEnd(), false)); 
for (Long l : keysToRemove) { StateKey.Builder keyBuilder = request.getStateKey().toBuilder(); keyBuilder.getOrderedListUserStateBuilder().setSortKey(l); data.remove(keyBuilder.build()); orderedListKeys.get(request.getStateKey()).remove(l); } } for (OrderedListEntry e : request.getOrderedListUpdate().getInsertsList()) { StateKey.Builder keyBuilder = request.getStateKey().toBuilder(); keyBuilder.getOrderedListUserStateBuilder().setSortKey(e.getSortKey()); ByteStringOutputStream outStream = new ByteStringOutputStream(); try { InstantCoder.of().encode(Instant.ofEpochMilli(e.getSortKey()), outStream); } catch (IOException ex) { throw new RuntimeException(ex); } ByteString output = e.getData().concat(outStream.toByteString()); List<ByteString> previousValues = data.computeIfAbsent(keyBuilder.build(), (unused) -> new ArrayList<>()); previousValues.add(output); orderedListKeys .computeIfAbsent(request.getStateKey(), (unused) -> new TreeSet<>()) .add(e.getSortKey()); } response = StateResponse.newBuilder() .setOrderedListUpdate(OrderedListStateUpdateResponse.getDefaultInstance()); break; default: throw new IllegalStateException( String.format("Unknown request type %s", request.getRequestCase())); } return CompletableFuture.completedFuture(response.setId(requestBuilder.getId()).build()); }
public CompletableFuture<StateResponse> handle(StateRequest.Builder requestBuilder) { assertEquals("", requestBuilder.getId()); requestBuilder.setId(generateId()); StateRequest request = requestBuilder.build(); StateKey key = request.getStateKey(); StateResponse.Builder response; assertNotEquals(RequestCase.REQUEST_NOT_SET, request.getRequestCase()); assertNotEquals(TypeCase.TYPE_NOT_SET, key.getTypeCase()); if (key.getTypeCase() == TypeCase.MULTIMAP_SIDE_INPUT || key.getTypeCase() == TypeCase.RUNNER) { assertEquals(GET, request.getRequestCase()); } if (key.getTypeCase() == TypeCase.MULTIMAP_KEYS_VALUES_SIDE_INPUT && !data.containsKey(key)) { throw new UnsupportedOperationException("No multimap keys values states provided."); } switch (request.getRequestCase()) { case GET: if (key.getTypeCase() == TypeCase.ORDERED_LIST_USER_STATE) { long start = key.getOrderedListUserState().getRange().getStart(); long end = key.getOrderedListUserState().getRange().getEnd(); KvCoder<Long, Integer> coder = KvCoder.of(VarLongCoder.of(), VarIntCoder.of()); long sortKey = start; int index = 0; if (!request.getGet().getContinuationToken().isEmpty()) { try { KV<Long, Integer> cursor = coder.decode(request.getGet().getContinuationToken().newInput()); sortKey = cursor.getKey(); index = cursor.getValue(); } catch (IOException e) { throw new RuntimeException(e); } } ByteString continuationToken; ByteString returnBlock = ByteString.EMPTY; try { if (sortKey < start || sortKey >= end) { throw new IndexOutOfBoundsException("sort key out of range"); } StateKey.Builder stateKeyWithoutRange = request.getStateKey().toBuilder(); stateKeyWithoutRange.getOrderedListUserStateBuilder().clearRange(); NavigableSet<Long> subset = orderedListSortKeysFromStateKey .getOrDefault(stateKeyWithoutRange.build(), new TreeSet<>()) .subSet(sortKey, true, end, false); Long nextSortKey = subset.first(); StateKey.Builder keyBuilder = request.getStateKey().toBuilder(); keyBuilder .getOrderedListUserStateBuilder() 
.getRangeBuilder() .setStart(nextSortKey) .setEnd(nextSortKey + 1); List<ByteString> byteStrings = data.getOrDefault(keyBuilder.build(), Collections.singletonList(ByteString.EMPTY)); returnBlock = byteStrings.get(index); if (byteStrings.size() > index + 1) { index += 1; } else { nextSortKey = subset.tailSet(nextSortKey, false).first(); index = 0; } ByteStringOutputStream outputStream = new ByteStringOutputStream(); try { KV<Long, Integer> cursor = KV.of(nextSortKey, index); coder.encode(cursor, outputStream); } catch (IOException e) { throw new RuntimeException(e); } continuationToken = outputStream.toByteString(); } catch (NoSuchElementException | IndexOutOfBoundsException e) { continuationToken = ByteString.EMPTY; } response = StateResponse.newBuilder() .setGet( StateGetResponse.newBuilder() .setData(returnBlock) .setContinuationToken(continuationToken)); } else { List<ByteString> byteStrings = data.getOrDefault(request.getStateKey(), Collections.singletonList(ByteString.EMPTY)); int block = 0; if (!request.getGet().getContinuationToken().isEmpty()) { block = Integer.parseInt(request.getGet().getContinuationToken().toStringUtf8()); } ByteString returnBlock = byteStrings.get(block); ByteString continuationToken = ByteString.EMPTY; if (byteStrings.size() > block + 1) { continuationToken = ByteString.copyFromUtf8(Integer.toString(block + 1)); } response = StateResponse.newBuilder() .setGet( StateGetResponse.newBuilder() .setData(returnBlock) .setContinuationToken(continuationToken)); } break; case CLEAR: if (key.getTypeCase() == TypeCase.ORDERED_LIST_USER_STATE) { OrderedListRange r = request.getStateKey().getOrderedListUserState().getRange(); StateKey.Builder stateKeyWithoutRange = request.getStateKey().toBuilder(); stateKeyWithoutRange.getOrderedListUserStateBuilder().clearRange(); List<Long> keysToRemove = new ArrayList<>( orderedListSortKeysFromStateKey .getOrDefault(stateKeyWithoutRange.build(), new TreeSet<>()) .subSet(r.getStart(), true, r.getEnd(), false)); 
for (Long l : keysToRemove) { StateKey.Builder keyBuilder = request.getStateKey().toBuilder(); keyBuilder.getOrderedListUserStateBuilder().getRangeBuilder().setStart(l).setEnd(l + 1); data.remove(keyBuilder.build()); orderedListSortKeysFromStateKey.get(stateKeyWithoutRange.build()).remove(l); } } else { data.remove(request.getStateKey()); } response = StateResponse.newBuilder().setClear(StateClearResponse.getDefaultInstance()); break; case APPEND: if (key.getTypeCase() == TypeCase.ORDERED_LIST_USER_STATE) { InputStream inStream = request.getAppend().getData().newInput(); TimestampedValueCoder<byte[]> coder = TimestampedValueCoder.of(ByteArrayCoder.of()); try { while (inStream.available() > 0) { TimestampedValue<byte[]> tv = coder.decode(inStream); ByteStringOutputStream outStream = new ByteStringOutputStream(); coder.encode(tv, outStream); ByteString output = outStream.toByteString(); StateKey.Builder keyBuilder = request.getStateKey().toBuilder(); long sortKey = tv.getTimestamp().getMillis(); keyBuilder .getOrderedListUserStateBuilder() .getRangeBuilder() .setStart(sortKey) .setEnd(sortKey + 1); List<ByteString> previousValues = data.computeIfAbsent(keyBuilder.build(), (unused) -> new ArrayList<>()); previousValues.add(output); StateKey.Builder stateKeyWithoutRange = request.getStateKey().toBuilder(); stateKeyWithoutRange.getOrderedListUserStateBuilder().clearRange(); orderedListSortKeysFromStateKey .computeIfAbsent(stateKeyWithoutRange.build(), (unused) -> new TreeSet<>()) .add(sortKey); } } catch (IOException ex) { throw new RuntimeException(ex); } } else { List<ByteString> previousValue = data.computeIfAbsent(request.getStateKey(), (unused) -> new ArrayList<>()); previousValue.add(request.getAppend().getData()); } response = StateResponse.newBuilder().setAppend(StateAppendResponse.getDefaultInstance()); break; default: throw new IllegalStateException( String.format("Unknown request type %s", request.getRequestCase())); } return 
CompletableFuture.completedFuture(response.setId(requestBuilder.getId()).build()); }
class FakeBeamFnStateClient implements BeamFnStateClient { private static final int DEFAULT_CHUNK_SIZE = 6; private final Map<StateKey, List<ByteString>> data; private int currentId; private final Map<StateKey, NavigableSet<Long>> orderedListKeys; public <V> FakeBeamFnStateClient(Coder<V> valueCoder, Map<StateKey, List<V>> initialData) { this(valueCoder, initialData, DEFAULT_CHUNK_SIZE); } public <V> FakeBeamFnStateClient( Coder<V> valueCoder, Map<StateKey, List<V>> initialData, int chunkSize) { this(Maps.transformValues(initialData, (value) -> KV.of(valueCoder, value)), chunkSize); } public FakeBeamFnStateClient(Map<StateKey, KV<Coder<?>, List<?>>> initialData) { this(initialData, DEFAULT_CHUNK_SIZE); } public FakeBeamFnStateClient(Map<StateKey, KV<Coder<?>, List<?>>> initialData, int chunkSize) { Map<StateKey, List<ByteString>> encodedData = new HashMap<>( Maps.transformValues( initialData, (KV<Coder<?>, List<?>> coderAndValues) -> { List<ByteString> chunks = new ArrayList<>(); ByteStringOutputStream output = new ByteStringOutputStream(); for (Object value : coderAndValues.getValue()) { try { ((Coder<Object>) coderAndValues.getKey()).encode(value, output); } catch (IOException e) { throw new RuntimeException(e); } if (output.size() >= chunkSize) { ByteString chunk = output.toByteStringAndReset(); int i = 0; for (; i + chunkSize <= chunk.size(); i += chunkSize) { chunks.add( ByteString.copyFrom(chunk.substring(i, i + chunkSize).toByteArray())); } if (i < chunk.size()) { chunks.add( ByteString.copyFrom(chunk.substring(i, chunk.size()).toByteArray())); } } } if (output.size() > 0) { chunks.add(output.toByteString()); } return chunks; })); Map<StateKey, Coder<Object>> orderedListInitialData = new HashMap<>( Maps.transformValues( Maps.filterKeys( initialData, (k) -> k.getTypeCase() == TypeCase.ORDERED_LIST_USER_STATE), (v) -> { assert v.getKey() instanceof TimestampedValueCoder; return ((TimestampedValueCoder<Object>) v.getKey()).getValueCoder(); })); 
this.orderedListKeys = new HashMap<>(); for (Map.Entry<StateKey, Coder<Object>> entry : orderedListInitialData.entrySet()) { long sortKey = entry.getKey().getOrderedListUserState().getSortKey(); StateKey.Builder keyBuilder = entry.getKey().toBuilder(); keyBuilder.getOrderedListUserStateBuilder().clearSortKey(); this.orderedListKeys .computeIfAbsent(keyBuilder.build(), (unused) -> new TreeSet<>()) .add(sortKey); } this.data = new ConcurrentHashMap<>( Maps.filterValues(encodedData, byteStrings -> !byteStrings.isEmpty())); } public Map<StateKey, ByteString> getData() { return Maps.transformValues( data, bs -> { ByteString all = ByteString.EMPTY; for (ByteString b : bs) { all = all.concat(b); } return all; }); } public Map<StateKey, List<ByteString>> getRawData() { return data; } @Override private String generateId() { return Integer.toString(++currentId); } public int getCallCount() { return currentId; } }
class FakeBeamFnStateClient implements BeamFnStateClient { private static final int DEFAULT_CHUNK_SIZE = 6; private final Map<StateKey, List<ByteString>> data; private int currentId; private final Map<StateKey, NavigableSet<Long>> orderedListSortKeysFromStateKey; public <V> FakeBeamFnStateClient(Coder<V> valueCoder, Map<StateKey, List<V>> initialData) { this(valueCoder, initialData, DEFAULT_CHUNK_SIZE); } public <V> FakeBeamFnStateClient( Coder<V> valueCoder, Map<StateKey, List<V>> initialData, int chunkSize) { this(Maps.transformValues(initialData, (value) -> KV.of(valueCoder, value)), chunkSize); } public FakeBeamFnStateClient(Map<StateKey, KV<Coder<?>, List<?>>> initialData) { this(initialData, DEFAULT_CHUNK_SIZE); } public FakeBeamFnStateClient(Map<StateKey, KV<Coder<?>, List<?>>> initialData, int chunkSize) { Map<StateKey, List<ByteString>> encodedData = new HashMap<>( Maps.transformValues( initialData, (KV<Coder<?>, List<?>> coderAndValues) -> { List<ByteString> chunks = new ArrayList<>(); ByteStringOutputStream output = new ByteStringOutputStream(); for (Object value : coderAndValues.getValue()) { try { ((Coder<Object>) coderAndValues.getKey()).encode(value, output); } catch (IOException e) { throw new RuntimeException(e); } if (output.size() >= chunkSize) { ByteString chunk = output.toByteStringAndReset(); int i = 0; for (; i + chunkSize <= chunk.size(); i += chunkSize) { chunks.add( ByteString.copyFrom(chunk.substring(i, i + chunkSize).toByteArray())); } if (i < chunk.size()) { chunks.add( ByteString.copyFrom(chunk.substring(i, chunk.size()).toByteArray())); } } } if (output.size() > 0) { chunks.add(output.toByteString()); } return chunks; })); List<StateKey> orderedListStateKeys = initialData.keySet().stream() .filter((k) -> k.getTypeCase() == TypeCase.ORDERED_LIST_USER_STATE) .collect(Collectors.toList()); this.orderedListSortKeysFromStateKey = new HashMap<>(); for (StateKey key : orderedListStateKeys) { long sortKey = 
key.getOrderedListUserState().getRange().getStart(); StateKey.Builder keyBuilder = key.toBuilder(); keyBuilder.getOrderedListUserStateBuilder().clearRange(); this.orderedListSortKeysFromStateKey .computeIfAbsent(keyBuilder.build(), (unused) -> new TreeSet<>()) .add(sortKey); } this.data = new ConcurrentHashMap<>( Maps.filterValues(encodedData, byteStrings -> !byteStrings.isEmpty())); } public Map<StateKey, ByteString> getData() { return Maps.transformValues( data, bs -> { ByteString all = ByteString.EMPTY; for (ByteString b : bs) { all = all.concat(b); } return all; }); } public Map<StateKey, List<ByteString>> getRawData() { return data; } @Override private String generateId() { return Integer.toString(++currentId); } public int getCallCount() { return currentId; } }
Hi @totalo This change causes build failed on JDK11 and JDK17. Is there any problem before? ``` [ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.8.0:compile (default-compile) on project shardingsphere-infra-common: Compilation failure [ERROR] /Users/wuweijie/IdeaProjects/shardingsphere/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/yaml/schema/swapper/SchemaYamlSwapper.java:[48,25] incompatible types: java.lang.Object cannot be converted to java.util.Map<java.lang.String,org.apache.shardingsphere.infra.yaml.schema.pojo.YamlTableMetaData> [ERROR] [ERROR] -> [Help 1] [ERROR] [ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch. [ERROR] Re-run Maven using the -X switch to enable full debug logging. [ERROR] [ERROR] For more information about the errors and possible solutions, please read the following articles: [ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException [ERROR] [ERROR] After correcting the problems, you can resume the build with the command [ERROR] mvn <args> -rf :shardingsphere-infra-common ```
public YamlSchema swapToYamlConfiguration(final ShardingSphereSchema schema) { Map<String, YamlTableMetaData> tables = schema.getAllTableNames().stream() .collect(Collectors.<String, String, YamlTableMetaData, Map>toMap(each -> each, each -> swapYamlTable(schema.get(each)), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); YamlSchema result = new YamlSchema(); result.setTables(tables); return result; }
.collect(Collectors.<String, String, YamlTableMetaData, Map>toMap(each -> each, each -> swapYamlTable(schema.get(each)), (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
public YamlSchema swapToYamlConfiguration(final ShardingSphereSchema schema) { Map<String, YamlTableMetaData> tables = schema.getAllTableNames().stream() .collect(Collectors.<String, String, YamlTableMetaData, Map>toMap(each -> each, each -> swapYamlTable(schema.get(each)), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); YamlSchema result = new YamlSchema(); result.setTables(tables); return result; }
class SchemaYamlSwapper implements YamlConfigurationSwapper<YamlSchema, ShardingSphereSchema> { @Override @Override public ShardingSphereSchema swapToObject(final YamlSchema yamlConfig) { return Optional.ofNullable(yamlConfig).map(this::swapSchema).orElseGet(ShardingSphereSchema::new); } private ShardingSphereSchema swapSchema(final YamlSchema schema) { return new ShardingSphereSchema(MapUtils.isEmpty(schema.getTables()) ? Maps.newLinkedHashMap() : schema.getTables().entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> swapTable(entry.getKey(), entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new))); } private TableMetaData swapTable(final String tableName, final YamlTableMetaData table) { return new TableMetaData(tableName, swapColumns(table.getColumns()), swapIndexes(table.getIndexes())); } private Collection<IndexMetaData> swapIndexes(final Map<String, YamlIndexMetaData> indexes) { return null == indexes ? Collections.emptyList() : indexes.values().stream().map(this::swapIndex).collect(Collectors.toList()); } private IndexMetaData swapIndex(final YamlIndexMetaData index) { return new IndexMetaData(index.getName()); } private Collection<ColumnMetaData> swapColumns(final Map<String, YamlColumnMetaData> indexes) { return null == indexes ? 
Collections.emptyList() : indexes.values().stream().map(this::swapColumn).collect(Collectors.toList()); } private ColumnMetaData swapColumn(final YamlColumnMetaData column) { return new ColumnMetaData(column.getName(), column.getDataType(), column.isPrimaryKey(), column.isGenerated(), column.isCaseSensitive()); } private YamlTableMetaData swapYamlTable(final TableMetaData table) { YamlTableMetaData result = new YamlTableMetaData(); result.setColumns(swapYamlColumns(table.getColumns())); result.setIndexes(swapYamlIndexes(table.getIndexes())); return result; } private Map<String, YamlIndexMetaData> swapYamlIndexes(final Map<String, IndexMetaData> indexes) { return indexes.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapYamlIndex(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } private YamlIndexMetaData swapYamlIndex(final IndexMetaData index) { YamlIndexMetaData result = new YamlIndexMetaData(); result.setName(index.getName()); return result; } private Map<String, YamlColumnMetaData> swapYamlColumns(final Map<String, ColumnMetaData> columns) { return columns.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapYamlColumn(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } private YamlColumnMetaData swapYamlColumn(final ColumnMetaData column) { YamlColumnMetaData result = new YamlColumnMetaData(); result.setName(column.getName()); result.setCaseSensitive(column.isCaseSensitive()); result.setGenerated(column.isGenerated()); result.setPrimaryKey(column.isPrimaryKey()); result.setDataType(result.getDataType()); result.setDataTypeName(result.getDataTypeName()); return result; } }
class SchemaYamlSwapper implements YamlConfigurationSwapper<YamlSchema, ShardingSphereSchema> { @Override @Override public ShardingSphereSchema swapToObject(final YamlSchema yamlConfig) { return Optional.ofNullable(yamlConfig).map(this::swapSchema).orElseGet(ShardingSphereSchema::new); } private ShardingSphereSchema swapSchema(final YamlSchema schema) { return new ShardingSphereSchema(MapUtils.isEmpty(schema.getTables()) ? Maps.newLinkedHashMap() : schema.getTables().entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> swapTable(entry.getKey(), entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new))); } private TableMetaData swapTable(final String tableName, final YamlTableMetaData table) { return new TableMetaData(tableName, swapColumns(table.getColumns()), swapIndexes(table.getIndexes())); } private Collection<IndexMetaData> swapIndexes(final Map<String, YamlIndexMetaData> indexes) { return null == indexes ? Collections.emptyList() : indexes.values().stream().map(this::swapIndex).collect(Collectors.toList()); } private IndexMetaData swapIndex(final YamlIndexMetaData index) { return new IndexMetaData(index.getName()); } private Collection<ColumnMetaData> swapColumns(final Map<String, YamlColumnMetaData> indexes) { return null == indexes ? 
Collections.emptyList() : indexes.values().stream().map(this::swapColumn).collect(Collectors.toList()); } private ColumnMetaData swapColumn(final YamlColumnMetaData column) { return new ColumnMetaData(column.getName(), column.getDataType(), column.isPrimaryKey(), column.isGenerated(), column.isCaseSensitive()); } private YamlTableMetaData swapYamlTable(final TableMetaData table) { YamlTableMetaData result = new YamlTableMetaData(); result.setColumns(swapYamlColumns(table.getColumns())); result.setIndexes(swapYamlIndexes(table.getIndexes())); return result; } private Map<String, YamlIndexMetaData> swapYamlIndexes(final Map<String, IndexMetaData> indexes) { return indexes.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapYamlIndex(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } private YamlIndexMetaData swapYamlIndex(final IndexMetaData index) { YamlIndexMetaData result = new YamlIndexMetaData(); result.setName(index.getName()); return result; } private Map<String, YamlColumnMetaData> swapYamlColumns(final Map<String, ColumnMetaData> columns) { return columns.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapYamlColumn(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } private YamlColumnMetaData swapYamlColumn(final ColumnMetaData column) { YamlColumnMetaData result = new YamlColumnMetaData(); result.setName(column.getName()); result.setCaseSensitive(column.isCaseSensitive()); result.setGenerated(column.isGenerated()); result.setPrimaryKey(column.isPrimaryKey()); result.setDataType(result.getDataType()); result.setDataTypeName(result.getDataTypeName()); return result; } }
consider putting `UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()` into some static variable and using that.
void testRegisterConsumedPartitionGroupToEdgeManager() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; assertThat(partition2.getConsumedPartitionGroups().get(0)) .isEqualTo(partition1.getConsumedPartitionGroups().get(0)); ConsumedPartitionGroup consumedPartitionGroup = partition1.getConsumedPartitionGroups().get(0); Set<IntermediateResultPartitionID> partitionIds = new HashSet<>(); for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) { partitionIds.add(partitionId); } assertThat(partitionIds) .containsExactlyInAnyOrder( partition1.getPartitionId(), partition2.getPartitionId()); }
ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup());
void testRegisterConsumedPartitionGroupToEdgeManager() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; assertThat(partition2.getConsumedPartitionGroups().get(0)) .isEqualTo(partition1.getConsumedPartitionGroups().get(0)); ConsumedPartitionGroup consumedPartitionGroup = partition1.getConsumedPartitionGroups().get(0); Set<IntermediateResultPartitionID> partitionIds = new HashSet<>(); for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) { partitionIds.add(partitionId); } assertThat(partitionIds) .containsExactlyInAnyOrder( partition1.getPartitionId(), partition2.getPartitionId()); }
class DefaultExecutionGraphConstructionTest { @RegisterExtension static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE = TestingUtils.defaultExecutorExtension(); private ExecutionGraph createDefaultExecutionGraph(List<JobVertex> vertices) throws Exception { return TestingDefaultExecutionGraphBuilder.newBuilder() .setVertexParallelismStore(SchedulerBase.computeVertexParallelismStore(vertices)) .build(EXECUTOR_RESOURCE.getExecutor()); } private ExecutionGraph createDynamicExecutionGraph(List<JobVertex> vertices) throws Exception { return TestingDefaultExecutionGraphBuilder.newBuilder() .setVertexParallelismStore(SchedulerBase.computeVertexParallelismStore(vertices)) .buildDynamicGraph(EXECUTOR_RESOURCE.getExecutor()); } @Test void testExecutionAttemptIdInTwoIdenticalJobsIsNotSame() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2, v3)); ExecutionGraph eg1 = createDefaultExecutionGraph(ordered); ExecutionGraph eg2 = createDefaultExecutionGraph(ordered); eg1.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); eg2.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); assertThat( Sets.intersection( eg1.getRegisteredExecutions().keySet(), eg2.getRegisteredExecutions().keySet())) .isEmpty(); } /** * Creates a JobGraph of the following form. 
* * <pre> * v1--->v2-->\ * \ * v4 --->\ * ----->/ \ * v3-->/ v5 * \ / * ------------->/ * </pre> */ @Test void testCreateSimpleGraphBipartite() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); verifyTestGraph(eg, v1, v2, v3, v4, v5); } private void verifyTestGraph( ExecutionGraph eg, JobVertex v1, JobVertex v2, JobVertex v3, JobVertex v4, JobVertex v5) { ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v1, null, Collections.singletonList(v2)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v2, Collections.singletonList(v1), Collections.singletonList(v4)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v3, null, Arrays.asList(v4, v5)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( 
eg, v4, Arrays.asList(v2, v3), Collections.singletonList(v5)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v5, Arrays.asList(v4, v3), null); } @Test void testCannotConnectWrongOrder() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v5, v4)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); assertThatThrownBy( () -> eg.attachJobGraph( ordered, UnregisteredMetricGroups .createUnregisteredJobManagerJobMetricGroup())) .isInstanceOf(JobException.class); } @Test void testSetupInputSplits() throws Exception { final InputSplit[] emptySplits = new InputSplit[0]; InputSplitAssigner assigner1 = new TestingInputSplitAssigner(); InputSplitAssigner assigner2 = new TestingInputSplitAssigner(); InputSplitSource<InputSplit> source1 = new TestingInputSplitSource<>(emptySplits, assigner1); InputSplitSource<InputSplit> source2 = new TestingInputSplitSource<>(emptySplits, assigner2); JobVertex 
v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v3.setInputSplitSource(source1); v5.setInputSplitSource(source2); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2, v3, v4, v5)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); assertThat(eg.getAllVertices().get(v3.getID()).getSplitAssigner()).isEqualTo(assigner1); assertThat(eg.getAllVertices().get(v5.getID()).getSplitAssigner()).isEqualTo(assigner2); } @Test void testMultiConsumersForOneIntermediateResult() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); IntermediateDataSetID dataSetId = new IntermediateDataSetID(); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING, dataSetId, false); v3.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING, dataSetId, false); 
List<JobVertex> vertices = new ArrayList<>(Arrays.asList(v1, v2, v3)); ExecutionGraph eg = createDefaultExecutionGraph(vertices); eg.attachJobGraph( vertices, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); ExecutionJobVertex ejv1 = checkNotNull(eg.getJobVertex(v1.getID())); assertThat(ejv1.getProducedDataSets()).hasSize(1); assertThat(ejv1.getProducedDataSets()[0].getId()).isEqualTo(dataSetId); ExecutionJobVertex ejv2 = checkNotNull(eg.getJobVertex(v2.getID())); assertThat(ejv2.getInputs()).hasSize(1); assertThat(ejv2.getInputs().get(0).getId()).isEqualTo(dataSetId); ExecutionJobVertex ejv3 = checkNotNull(eg.getJobVertex(v3.getID())); assertThat(ejv3.getInputs()).hasSize(1); assertThat(ejv3.getInputs().get(0).getId()).isEqualTo(dataSetId); List<ConsumedPartitionGroup> partitionGroups1 = ejv2.getTaskVertices()[0].getAllConsumedPartitionGroups(); assertThat(partitionGroups1).hasSize(1); assertThat(partitionGroups1.get(0).getIntermediateDataSetID()).isEqualTo(dataSetId); List<ConsumedPartitionGroup> partitionGroups2 = ejv3.getTaskVertices()[0].getAllConsumedPartitionGroups(); assertThat(partitionGroups2).hasSize(1); assertThat(partitionGroups2.get(0).getIntermediateDataSetID()).isEqualTo(dataSetId); } @Test @Test void testPointWiseConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(4); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = 
result.getPartitions()[1]; IntermediateResultPartition partition3 = result.getPartitions()[2]; IntermediateResultPartition partition4 = result.getPartitions()[3]; ConsumedPartitionGroup consumedPartitionGroup1 = partition1.getConsumedPartitionGroups().get(0); ConsumedPartitionGroup consumedPartitionGroup2 = partition4.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup1.getNumberOfUnfinishedPartitions()).isEqualTo(2); assertThat(consumedPartitionGroup2.getNumberOfUnfinishedPartitions()).isEqualTo(2); partition1.markFinished(); partition2.markFinished(); assertThat(consumedPartitionGroup1.getNumberOfUnfinishedPartitions()).isZero(); partition3.markFinished(); partition4.markFinished(); assertThat(consumedPartitionGroup2.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testAllToAllConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; ConsumedPartitionGroup consumedPartitionGroup = partition1.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isEqualTo(2); partition1.markFinished(); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isEqualTo(1); partition2.markFinished(); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isZero(); } @Test void 
testDynamicGraphAllToAllConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDynamicExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); eg.initializeJobVertex( ejv1, 0L, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; partition1.markFinished(); partition2.markFinished(); assertThat(partition1.getConsumedPartitionGroups()).isEmpty(); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); eg.initializeJobVertex( ejv2, 0L, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); ConsumedPartitionGroup consumedPartitionGroup = partition1.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testDynamicGraphPointWiseConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(4); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDynamicExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); 
eg.initializeJobVertex( ejv1, 0L, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; IntermediateResultPartition partition3 = result.getPartitions()[2]; IntermediateResultPartition partition4 = result.getPartitions()[3]; partition1.markFinished(); partition2.markFinished(); partition3.markFinished(); partition4.markFinished(); assertThat(partition1.getConsumedPartitionGroups()).isEmpty(); assertThat(partition4.getConsumedPartitionGroups()).isEmpty(); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); eg.initializeJobVertex( ejv2, 0L, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); ConsumedPartitionGroup consumedPartitionGroup1 = partition1.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup1.getNumberOfUnfinishedPartitions()).isZero(); ConsumedPartitionGroup consumedPartitionGroup2 = partition4.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup2.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testAttachToDynamicGraph() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDynamicExecutionGraph(ordered); eg.attachJobGraph( ordered, UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup()); assertThat(eg.getAllVertices()).hasSize(2); Iterator<ExecutionJobVertex> jobVertices = eg.getVerticesTopologically().iterator(); assertThat(jobVertices.next().isInitialized()).isFalse(); assertThat(jobVertices.next().isInitialized()).isFalse(); } private static 
final class TestingInputSplitAssigner implements InputSplitAssigner { @Override public InputSplit getNextInputSplit(String host, int taskId) { return null; } @Override public void returnInputSplit(List<InputSplit> splits, int taskId) {} } private static final class TestingInputSplitSource<T extends InputSplit> implements InputSplitSource<T> { private final T[] inputSplits; private final InputSplitAssigner assigner; private TestingInputSplitSource(T[] inputSplits, InputSplitAssigner assigner) { this.inputSplits = inputSplits; this.assigner = assigner; } @Override public T[] createInputSplits(int minNumSplits) throws Exception { return inputSplits; } @Override public InputSplitAssigner getInputSplitAssigner(T[] inputSplits) { return assigner; } } }
class DefaultExecutionGraphConstructionTest { @RegisterExtension static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE = TestingUtils.defaultExecutorExtension(); private static final JobManagerJobMetricGroup JOB_MANAGER_JOB_METRIC_GROUP = UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup(); private ExecutionGraph createDefaultExecutionGraph(List<JobVertex> vertices) throws Exception { return TestingDefaultExecutionGraphBuilder.newBuilder() .setVertexParallelismStore(SchedulerBase.computeVertexParallelismStore(vertices)) .build(EXECUTOR_RESOURCE.getExecutor()); } private ExecutionGraph createDynamicExecutionGraph(List<JobVertex> vertices) throws Exception { return TestingDefaultExecutionGraphBuilder.newBuilder() .setVertexParallelismStore(SchedulerBase.computeVertexParallelismStore(vertices)) .buildDynamicGraph(EXECUTOR_RESOURCE.getExecutor()); } @Test void testExecutionAttemptIdInTwoIdenticalJobsIsNotSame() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2, v3)); ExecutionGraph eg1 = createDefaultExecutionGraph(ordered); ExecutionGraph eg2 = createDefaultExecutionGraph(ordered); eg1.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); eg2.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); assertThat( Sets.intersection( eg1.getRegisteredExecutions().keySet(), eg2.getRegisteredExecutions().keySet())) .isEmpty(); } /** * Creates a JobGraph of the following form. 
* * <pre> * v1--->v2-->\ * \ * v4 --->\ * ----->/ \ * v3-->/ v5 * \ / * ------------->/ * </pre> */ @Test void testCreateSimpleGraphBipartite() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); verifyTestGraph(eg, v1, v2, v3, v4, v5); } private void verifyTestGraph( ExecutionGraph eg, JobVertex v1, JobVertex v2, JobVertex v3, JobVertex v4, JobVertex v5) { ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v1, null, Collections.singletonList(v2)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v2, Collections.singletonList(v1), Collections.singletonList(v4)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v3, null, Arrays.asList(v4, v5)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v4, Arrays.asList(v2, v3), 
Collections.singletonList(v5)); ExecutionGraphTestUtils.verifyGeneratedExecutionJobVertex( eg, v5, Arrays.asList(v4, v3), null); } @Test void testCannotConnectWrongOrder() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v5, v4)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); assertThatThrownBy(() -> eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP)) .isInstanceOf(JobException.class); } @Test void testSetupInputSplits() throws Exception { final InputSplit[] emptySplits = new InputSplit[0]; InputSplitAssigner assigner1 = new TestingInputSplitAssigner(); InputSplitAssigner assigner2 = new TestingInputSplitAssigner(); InputSplitSource<InputSplit> source1 = new TestingInputSplitSource<>(emptySplits, assigner1); InputSplitSource<InputSplit> source2 = new TestingInputSplitSource<>(emptySplits, assigner2); JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); 
JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput( v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v3.setInputSplitSource(source1); v5.setInputSplitSource(source2); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2, v3, v4, v5)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); assertThat(eg.getAllVertices().get(v3.getID()).getSplitAssigner()).isEqualTo(assigner1); assertThat(eg.getAllVertices().get(v5.getID()).getSplitAssigner()).isEqualTo(assigner2); } @Test void testMultiConsumersForOneIntermediateResult() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); IntermediateDataSetID dataSetId = new IntermediateDataSetID(); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING, dataSetId, false); v3.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING, dataSetId, false); List<JobVertex> vertices = new ArrayList<>(Arrays.asList(v1, v2, v3)); ExecutionGraph eg = 
createDefaultExecutionGraph(vertices); eg.attachJobGraph(vertices, JOB_MANAGER_JOB_METRIC_GROUP); ExecutionJobVertex ejv1 = checkNotNull(eg.getJobVertex(v1.getID())); assertThat(ejv1.getProducedDataSets()).hasSize(1); assertThat(ejv1.getProducedDataSets()[0].getId()).isEqualTo(dataSetId); ExecutionJobVertex ejv2 = checkNotNull(eg.getJobVertex(v2.getID())); assertThat(ejv2.getInputs()).hasSize(1); assertThat(ejv2.getInputs().get(0).getId()).isEqualTo(dataSetId); ExecutionJobVertex ejv3 = checkNotNull(eg.getJobVertex(v3.getID())); assertThat(ejv3.getInputs()).hasSize(1); assertThat(ejv3.getInputs().get(0).getId()).isEqualTo(dataSetId); List<ConsumedPartitionGroup> partitionGroups1 = ejv2.getTaskVertices()[0].getAllConsumedPartitionGroups(); assertThat(partitionGroups1).hasSize(1); assertThat(partitionGroups1.get(0).getIntermediateDataSetID()).isEqualTo(dataSetId); List<ConsumedPartitionGroup> partitionGroups2 = ejv3.getTaskVertices()[0].getAllConsumedPartitionGroups(); assertThat(partitionGroups2).hasSize(1); assertThat(partitionGroups2.get(0).getIntermediateDataSetID()).isEqualTo(dataSetId); } @Test @Test void testPointWiseConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(4); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; IntermediateResultPartition partition3 = result.getPartitions()[2]; IntermediateResultPartition partition4 = result.getPartitions()[3]; 
ConsumedPartitionGroup consumedPartitionGroup1 = partition1.getConsumedPartitionGroups().get(0); ConsumedPartitionGroup consumedPartitionGroup2 = partition4.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup1.getNumberOfUnfinishedPartitions()).isEqualTo(2); assertThat(consumedPartitionGroup2.getNumberOfUnfinishedPartitions()).isEqualTo(2); partition1.markFinished(); partition2.markFinished(); assertThat(consumedPartitionGroup1.getNumberOfUnfinishedPartitions()).isZero(); partition3.markFinished(); partition4.markFinished(); assertThat(consumedPartitionGroup2.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testAllToAllConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDefaultExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; ConsumedPartitionGroup consumedPartitionGroup = partition1.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isEqualTo(2); partition1.markFinished(); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isEqualTo(1); partition2.markFinished(); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testDynamicGraphAllToAllConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); 
v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDynamicExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); eg.initializeJobVertex(ejv1, 0L, JOB_MANAGER_JOB_METRIC_GROUP); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; partition1.markFinished(); partition2.markFinished(); assertThat(partition1.getConsumedPartitionGroups()).isEmpty(); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); eg.initializeJobVertex(ejv2, 0L, JOB_MANAGER_JOB_METRIC_GROUP); ConsumedPartitionGroup consumedPartitionGroup = partition1.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testDynamicGraphPointWiseConsumedPartitionGroupPartitionFinished() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(4); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDynamicExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); eg.initializeJobVertex(ejv1, 0L, JOB_MANAGER_JOB_METRIC_GROUP); IntermediateResult result = Objects.requireNonNull(eg.getJobVertex(v1.getID())).getProducedDataSets()[0]; IntermediateResultPartition partition1 = result.getPartitions()[0]; IntermediateResultPartition partition2 = result.getPartitions()[1]; IntermediateResultPartition partition3 = result.getPartitions()[2]; 
IntermediateResultPartition partition4 = result.getPartitions()[3]; partition1.markFinished(); partition2.markFinished(); partition3.markFinished(); partition4.markFinished(); assertThat(partition1.getConsumedPartitionGroups()).isEmpty(); assertThat(partition4.getConsumedPartitionGroups()).isEmpty(); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); eg.initializeJobVertex(ejv2, 0L, JOB_MANAGER_JOB_METRIC_GROUP); ConsumedPartitionGroup consumedPartitionGroup1 = partition1.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup1.getNumberOfUnfinishedPartitions()).isZero(); ConsumedPartitionGroup consumedPartitionGroup2 = partition4.getConsumedPartitionGroups().get(0); assertThat(consumedPartitionGroup2.getNumberOfUnfinishedPartitions()).isZero(); } @Test void testAttachToDynamicGraph() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); v1.setParallelism(2); v2.setParallelism(2); v2.connectNewDataSetAsInput( v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2)); ExecutionGraph eg = createDynamicExecutionGraph(ordered); eg.attachJobGraph(ordered, JOB_MANAGER_JOB_METRIC_GROUP); assertThat(eg.getAllVertices()).hasSize(2); Iterator<ExecutionJobVertex> jobVertices = eg.getVerticesTopologically().iterator(); assertThat(jobVertices.next().isInitialized()).isFalse(); assertThat(jobVertices.next().isInitialized()).isFalse(); } private static final class TestingInputSplitAssigner implements InputSplitAssigner { @Override public InputSplit getNextInputSplit(String host, int taskId) { return null; } @Override public void returnInputSplit(List<InputSplit> splits, int taskId) {} } private static final class TestingInputSplitSource<T extends InputSplit> implements InputSplitSource<T> { private final T[] inputSplits; private final InputSplitAssigner assigner; private TestingInputSplitSource(T[] inputSplits, InputSplitAssigner assigner) { 
this.inputSplits = inputSplits; this.assigner = assigner; } @Override public T[] createInputSplits(int minNumSplits) throws Exception { return inputSplits; } @Override public InputSplitAssigner getInputSplitAssigner(T[] inputSplits) { return assigner; } } }
Are these two lines necessary? Because we haven't even call the `write` method, why do we care about them?
void testClose() { int numSubpartitions = 10; TestingTierProducerAgent tierProducerAgent = new TestingTierProducerAgent(); tierProducerAgent.setTryStartNewSegmentReturnValueSupplier(() -> true); tierProducerAgent.setTryWriteReturnValueSupplier(() -> new Boolean[] {false, true}); assertThat(tierProducerAgent.isClosed()).isFalse(); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient( numSubpartitions, Collections.singletonList(tierProducerAgent)); tieredStorageProducerClient.close(); assertThat(tierProducerAgent.isClosed()).isTrue(); }
tierProducerAgent.setTryWriteReturnValueSupplier(() -> new Boolean[] {false, true});
void testClose() { int numSubpartitions = 10; AtomicBoolean isClosed = new AtomicBoolean(false); TestingTierProducerAgent tierProducerAgent = new TestingTierProducerAgent.Builder() .setCloseRunnable(() -> isClosed.set(true)) .build(); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient( numSubpartitions, Collections.singletonList(tierProducerAgent)); assertThat(isClosed.get()).isFalse(); tieredStorageProducerClient.close(); assertThat(isClosed.get()).isTrue(); }
class TieredStorageProducerClientTest { private static final int NUM_TOTAL_BUFFERS = 1000; private static final int NETWORK_BUFFER_SIZE = 1024; @Parameter public boolean isBroadcast; private NetworkBufferPool globalPool; @Parameters(name = "isBroadcast={0}") public static Collection<Boolean> parameters() { return Arrays.asList(false, true); } @BeforeEach void before() { globalPool = new NetworkBufferPool(NUM_TOTAL_BUFFERS, NETWORK_BUFFER_SIZE); } @AfterEach void after() { globalPool.destroy(); } @TestTemplate void testWriteRecordsToEmptyStorageTiers() { int numSubpartitions = 10; int bufferSize = 1024; Random random = new Random(); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient(numSubpartitions, Collections.emptyList()); assertThatThrownBy( () -> tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(0), Buffer.DataType.DATA_BUFFER, isBroadcast)) .isInstanceOf(RuntimeException.class) .hasMessageContaining("Failed to choose a storage tier"); } @TestTemplate void testWriteRecords() throws IOException { int numSubpartitions = 10; int bufferSize = 1024; int maxNumToWriteRecordsPerSubpartition = 1000; Random random = new Random(); TestingTierProducerAgent tierProducerAgent = new TestingTierProducerAgent(); tierProducerAgent.setTryStartNewSegmentReturnValueSupplier(() -> true); tierProducerAgent.setTryWriteReturnValueSupplier(() -> new Boolean[] {false, true}); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient( numSubpartitions, Collections.singletonList(tierProducerAgent)); int numWriteRecords = 0; for (int j = 0; j < numSubpartitions; j++) { for (int i = 0; i < random.nextInt(maxNumToWriteRecordsPerSubpartition); i++) { tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(j), Buffer.DataType.DATA_BUFFER, isBroadcast); numWriteRecords++; } } int numExpectedBuffers = isBroadcast ? 
numWriteRecords * numSubpartitions : numWriteRecords; assertThat(tierProducerAgent.numTotalReceivedBuffers()).isEqualTo(numExpectedBuffers); } @TestTemplate void testTierCanNotStartNewSegment() throws IOException { int numSubpartitions = 10; int bufferSize = 1024; Random random = new Random(); TestingTierProducerAgent tierProducerAgent = new TestingTierProducerAgent(); tierProducerAgent.setTryStartNewSegmentReturnValueSupplier(() -> false); tierProducerAgent.setTryWriteReturnValueSupplier(() -> new Boolean[] {false, true}); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient( numSubpartitions, Collections.singletonList(tierProducerAgent)); assertThatThrownBy( () -> tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(0), Buffer.DataType.DATA_BUFFER, isBroadcast)) .isInstanceOf(RuntimeException.class) .hasMessageContaining("Failed to choose a storage tier"); tierProducerAgent.setTryStartNewSegmentReturnValueSupplier(() -> true); tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(0), Buffer.DataType.DATA_BUFFER, isBroadcast); assertThat(tierProducerAgent.numTotalReceivedBuffers()) .isEqualTo(isBroadcast ? numSubpartitions : 1); } @TestTemplate private static TieredStorageProducerClient createTieredStorageProducerClient( int numSubpartitions, List<TierProducerAgent> tierProducerAgents) { TieredStorageProducerClient tieredStorageProducerClient = new TieredStorageProducerClient( numSubpartitions, false, new TestingBufferAccumulator(), null, tierProducerAgents); tieredStorageProducerClient.setMetricStatisticsUpdater(metricStatistics -> {}); return tieredStorageProducerClient; } }
class TieredStorageProducerClientTest { private static final int NUM_TOTAL_BUFFERS = 1000; private static final int NETWORK_BUFFER_SIZE = 1024; @Parameter public boolean isBroadcast; private NetworkBufferPool globalPool; @Parameters(name = "isBroadcast={0}") public static Collection<Boolean> parameters() { return Arrays.asList(false, true); } @BeforeEach void before() { globalPool = new NetworkBufferPool(NUM_TOTAL_BUFFERS, NETWORK_BUFFER_SIZE); } @AfterEach void after() { globalPool.destroy(); } @TestTemplate void testWriteRecordsToEmptyStorageTiers() { int numSubpartitions = 10; int bufferSize = 1024; Random random = new Random(); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient(numSubpartitions, Collections.emptyList()); assertThatThrownBy( () -> tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(0), Buffer.DataType.DATA_BUFFER, isBroadcast)) .isInstanceOf(RuntimeException.class) .hasMessageContaining("Failed to choose a storage tier"); } @TestTemplate void testWriteRecords() throws IOException { int numSubpartitions = 10; int numToWriteRecords = 20; int bufferSize = 1024; Random random = new Random(); AtomicInteger numReceivedBuffers = new AtomicInteger(0); AtomicInteger numReceivedBytes = new AtomicInteger(0); AtomicInteger numReceivedBuffersInTier1 = new AtomicInteger(0); AtomicInteger numReceivedBuffersInTier2 = new AtomicInteger(0); TestingTierProducerAgent tierProducerAgent1 = new TestingTierProducerAgent.Builder() .setTryStartSegmentSupplier( ((subpartitionId, integer) -> numReceivedBuffersInTier1.get() < 1)) .setTryWriterFunction( ((subpartitionId, buffer) -> { boolean isSuccess = numReceivedBuffersInTier1.get() % 2 == 0; if (isSuccess) { numReceivedBuffers.incrementAndGet(); numReceivedBuffersInTier1.incrementAndGet(); numReceivedBytes.set( numReceivedBytes.get() + buffer.readableBytes()); } return isSuccess; })) .build(); TestingTierProducerAgent 
tierProducerAgent2 = new TestingTierProducerAgent.Builder() .setTryWriterFunction( ((subpartitionId, buffer) -> { numReceivedBuffers.incrementAndGet(); numReceivedBuffersInTier2.incrementAndGet(); numReceivedBytes.set( numReceivedBytes.get() + buffer.readableBytes()); return true; })) .build(); List<TierProducerAgent> tierProducerAgents = new ArrayList<>(); tierProducerAgents.add(tierProducerAgent1); tierProducerAgents.add(tierProducerAgent2); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient(numSubpartitions, tierProducerAgents); TieredStorageSubpartitionId subpartitionId = new TieredStorageSubpartitionId(0); for (int i = 0; i < numToWriteRecords; i++) { tieredStorageProducerClient.write( generateRandomData(bufferSize, random), subpartitionId, Buffer.DataType.DATA_BUFFER, isBroadcast); } int numExpectedBytes = isBroadcast ? numSubpartitions * numToWriteRecords * bufferSize : numToWriteRecords * bufferSize; assertThat(numReceivedBuffersInTier1.get()).isEqualTo(1); assertThat(numReceivedBuffers.get()) .isEqualTo(numReceivedBuffersInTier1.get() + numReceivedBuffersInTier2.get()); assertThat(numReceivedBytes.get()).isEqualTo(numExpectedBytes); } @TestTemplate void testTierCanNotStartNewSegment() { int numSubpartitions = 10; int bufferSize = 1024; Random random = new Random(); TestingTierProducerAgent tierProducerAgent = new TestingTierProducerAgent.Builder() .setTryStartSegmentSupplier(((subpartitionId, integer) -> false)) .build(); TieredStorageProducerClient tieredStorageProducerClient = createTieredStorageProducerClient( numSubpartitions, Collections.singletonList(tierProducerAgent)); assertThatThrownBy( () -> tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(0), Buffer.DataType.DATA_BUFFER, isBroadcast)) .isInstanceOf(RuntimeException.class) .hasMessageContaining("Failed to choose a storage tier"); } @TestTemplate void testUpdateMetrics() throws IOException { int 
numSubpartitions = 10; int bufferSize = 1024; Random random = new Random(); TestingTierProducerAgent tierProducerAgent = new TestingTierProducerAgent.Builder().build(); TieredStorageProducerClient tieredStorageProducerClient = new TieredStorageProducerClient( numSubpartitions, false, new TestingBufferAccumulator(), null, Collections.singletonList(tierProducerAgent)); AtomicInteger numWriteBuffers = new AtomicInteger(0); AtomicInteger numWriteBytes = new AtomicInteger(0); tieredStorageProducerClient.setMetricStatisticsUpdater( metricStatistics -> { numWriteBuffers.set( numWriteBuffers.get() + metricStatistics.numWriteBuffersDelta()); numWriteBytes.set(numWriteBytes.get() + metricStatistics.numWriteBytesDelta()); }); tieredStorageProducerClient.write( generateRandomData(bufferSize, random), new TieredStorageSubpartitionId(0), Buffer.DataType.DATA_BUFFER, isBroadcast); int numExpectedBuffers = isBroadcast ? numSubpartitions : 1; int numExpectedBytes = isBroadcast ? bufferSize * numSubpartitions : bufferSize; assertThat(numWriteBuffers.get()).isEqualTo(numExpectedBuffers); assertThat(numWriteBytes.get()).isEqualTo(numExpectedBytes); } @TestTemplate private static TieredStorageProducerClient createTieredStorageProducerClient( int numSubpartitions, List<TierProducerAgent> tierProducerAgents) { TieredStorageProducerClient tieredStorageProducerClient = new TieredStorageProducerClient( numSubpartitions, false, new TestingBufferAccumulator(), null, tierProducerAgents); tieredStorageProducerClient.setMetricStatisticsUpdater(metricStatistics -> {}); return tieredStorageProducerClient; } }
`ArrayList<JarFile>` should be declared as `Collection<JarFile>`
public void assertInitPluginLifecycleService() { Map<String, PluginConfiguration> pluginConfigs = new HashMap<>(); ArrayList<JarFile> pluginJars = new ArrayList<>(); PluginLifecycleServiceManager.init(pluginConfigs, pluginJars, new MultipleParentClassLoader(new ArrayList<>()), true); }
ArrayList<JarFile> pluginJars = new ArrayList<>();
public void assertInitPluginLifecycleService() { Map<String, PluginConfiguration> pluginConfigs = new HashMap<>(); Collection<JarFile> pluginJars = new LinkedList<>(); PluginLifecycleServiceManager.init(pluginConfigs, pluginJars, new MultipleParentClassLoader(new LinkedList<>()), true); }
class PluginLifecycleServiceManagerTest { @Test @Test public void assertInitPluginLifecycleServiceWithMap() { Map<String, PluginConfiguration> stringPluginConfigurationMap = new HashMap<>(); stringPluginConfigurationMap.put("Key", new PluginConfiguration("localhost", 8080, "random", new Properties())); ArrayList<JarFile> pluginJars = new ArrayList<>(); PluginLifecycleServiceManager.init(stringPluginConfigurationMap, pluginJars, new MultipleParentClassLoader(new ArrayList<>()), true); } @Test public void assertInitPluginLifecycleServiceWithMockHandler() throws MalformedURLException { Map<String, PluginConfiguration> pluginConfigs = new HashMap<>(); ArrayList<JarFile> pluginJars = new ArrayList<>(); URLStreamHandlerFactory urlStreamHandlerFactory = mock(URLStreamHandlerFactory.class); when(urlStreamHandlerFactory.createURLStreamHandler((String) any())).thenReturn(null); PluginLifecycleServiceManager.init(pluginConfigs, pluginJars, new PrivateMLet(new URL[]{Paths.get(System.getProperty("java.io.tmpdir"), "test.txt").toUri().toURL()}, new MultipleParentClassLoader(new ArrayList<>()), urlStreamHandlerFactory, true), true); verify(urlStreamHandlerFactory).createURLStreamHandler((String) any()); } }
class PluginLifecycleServiceManagerTest { @Test @Test public void assertInitPluginLifecycleServiceWithMap() { Map<String, PluginConfiguration> stringPluginConfigurationMap = new HashMap<>(); stringPluginConfigurationMap.put("Key", new PluginConfiguration("localhost", 8080, "random", new Properties())); Collection<JarFile> pluginJars = new LinkedList<>(); PluginLifecycleServiceManager.init(stringPluginConfigurationMap, pluginJars, new MultipleParentClassLoader(new LinkedList<>()), true); } @Test public void assertInitPluginLifecycleServiceWithMockHandler() throws MalformedURLException { Map<String, PluginConfiguration> pluginConfigs = new HashMap<>(); Collection<JarFile> pluginJars = new LinkedList<>(); URLStreamHandlerFactory urlStreamHandlerFactory = mock(URLStreamHandlerFactory.class); when(urlStreamHandlerFactory.createURLStreamHandler((String) any())).thenReturn(null); PluginLifecycleServiceManager.init(pluginConfigs, pluginJars, new PrivateMLet(new URL[]{Paths.get(System.getProperty("java.io.tmpdir"), "test.txt").toUri().toURL()}, new MultipleParentClassLoader(new LinkedList<>()), urlStreamHandlerFactory, true), true); verify(urlStreamHandlerFactory).createURLStreamHandler((String) any()); } }
Move the `() ->` down to the next line too, so it is `() -> listKeysFirstPage(),`
public PagedFlux<KeyBase> listKeys() { return new PagedFlux<>(() -> listKeysFirstPage(), continuationToken -> listKeysNextPage(continuationToken)); }
continuationToken -> listKeysNextPage(continuationToken));
public PagedFlux<KeyBase> listKeys() { return new PagedFlux<>(() -> listKeysFirstPage(), continuationToken -> listKeysNextPage(continuationToken)); }
class KeyAsyncClient { static final String API_VERSION = "7.0"; static final String ACCEPT_LANGUAGE = "en-US"; static final int DEFAULT_MAX_PAGE_RESULTS = 25; static final String CONTENT_TYPE_HEADER_VALUE = "application/json"; static final String KEY_VAULT_SCOPE = "https: private final String endpoint; private final KeyService service; private final ClientLogger logger = new ClientLogger(KeyAsyncClient.class); /** * Creates a KeyAsyncClient that uses {@code pipeline} to service requests * * @param endpoint URL for the Azure KeyVault service. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. */ KeyAsyncClient(URL endpoint, HttpPipeline pipeline) { Objects.requireNonNull(endpoint, KeyVaultErrorCodeStrings.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED)); this.endpoint = endpoint.toString(); this.service = RestProxy.create(KeyService.class, pipeline); } /** * Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in * key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link KeyType keyType} indicates the type of key to create. Possible values include: {@link KeyType * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new EC key. Subscribes to the call asynchronously and prints out the newly created key details when a response has been received.</p> * <pre> * keyAsyncClient.createKey("keyName", KeyType.EC).subscribe(keyResponse -&gt; * System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param name The name of the key being created. * @param keyType The type of key to create. For valid values, see {@link KeyType KeyType}. * @throws ResourceModifiedException if {@code name} or {@code keyType} is null. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createKey(String name, KeyType keyType) { KeyRequestParameters parameters = new KeyRequestParameters().kty(keyType); return service.createKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating key - {}", name)) .doOnSuccess(response -> logger.info("Created key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create key - {}", name, error)); } /** * Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in * key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link KeyCreateOptions} is required. The {@link KeyCreateOptions * are optional. The {@link KeyCreateOptions * * <p>The {@link KeyCreateOptions * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new Rsa key which activates in one day and expires in one year. Subscribes to the call asynchronously * and prints out the newly created key details when a response has been received.</p> * <pre> * KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA) * .notBefore(OffsetDateTime.now().plusDays(1)) * .expires(OffsetDateTime.now().plusYears(1)); * * keyAsyncClient.createKey(keyCreateOptions).subscribe(keyResponse -&gt; * System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param keyCreateOptions The key configuration object containing information about the key being created. * @throws NullPointerException if {@code keyCreateOptions} is {@code null}. * @throws ResourceModifiedException if {@code keyCreateOptions} is malformed. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createKey(KeyCreateOptions keyCreateOptions) { Objects.requireNonNull(keyCreateOptions, "The key options parameter cannot be null."); KeyRequestParameters parameters = new KeyRequestParameters() .kty(keyCreateOptions.keyType()) .keyOps(keyCreateOptions.keyOperations()) .keyAttributes(new KeyRequestAttributes(keyCreateOptions)); return service.createKey(endpoint, keyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating key - {}", keyCreateOptions.name())) .doOnSuccess(response -> logger.info("Created key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create key - {}", keyCreateOptions.name(), error)); } /** * Creates a new Rsa key and stores it in the key vault. The create Rsa key operation can be used to create any Rsa key type in * key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link RsaKeyCreateOptions} is required. The {@link RsaKeyCreateOptions * and {@link RsaKeyCreateOptions * is set to true by Azure Key Vault, if not specified.</p> * * <p>The {@link RsaKeyCreateOptions * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new RSA key with size 2048 which activates in one day and expires in one year. 
Subscribes to the call asynchronously * and prints out the newly created key details when a response has been received.</p> * <pre> * RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName", KeyType.RSA) * .keySize(2048) * .notBefore(OffsetDateTime.now().plusDays(1)) * .expires(OffsetDateTime.now().plusYears(1)); * * keyAsyncClient.createRsaKey(rsaKeyCreateOptions).subscribe(keyResponse -&gt; * System.out.printf("RSA Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param rsaKeyCreateOptions The key configuration object containing information about the rsa key being created. * @throws NullPointerException if {@code rsaKeyCreateOptions} is {@code null}. * @throws ResourceModifiedException if {@code rsaKeyCreateOptions} is malformed. * @throws HttpRequestException if {@code name} is empty string. * @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createRsaKey(RsaKeyCreateOptions rsaKeyCreateOptions) { Objects.requireNonNull(rsaKeyCreateOptions, "The Rsa key options parameter cannot be null."); KeyRequestParameters parameters = new KeyRequestParameters() .kty(rsaKeyCreateOptions.keyType()) .keySize(rsaKeyCreateOptions.keySize()) .keyOps(rsaKeyCreateOptions.keyOperations()) .keyAttributes(new KeyRequestAttributes(rsaKeyCreateOptions)); return service.createKey(endpoint, rsaKeyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating Rsa key - {}", rsaKeyCreateOptions.name())) .doOnSuccess(response -> logger.info("Created Rsa key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create Rsa key - {}", rsaKeyCreateOptions.name(), error)); } /** * Creates a new Ec key and stores it in the key vault. The create Ec key operation can be used to create any Ec key type in * key vault. 
If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link EcKeyCreateOptions} parameter is required. The {@link EcKeyCreateOptions * default value of {@link KeyCurveName * are optional. The {@link EcKeyCreateOptions * * <p>The {@link EcKeyCreateOptions * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new EC key with P-384 web key curve. The key activates in one day and expires in one year. Subscribes to the call asynchronously * and prints out the newly created ec key details when a response has been received.</p> * <pre> * EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName", KeyType.EC) * .curve(KeyCurveName.P_384) * .notBefore(OffsetDateTime.now().plusDays(1)) * .expires(OffsetDateTime.now().plusYears(1)); * * keyAsyncClient.createEcKey(ecKeyCreateOptions).subscribe(keyResponse -&gt; * System.out.printf("EC Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param ecKeyCreateOptions The key options object containing information about the ec key being created. * @throws NullPointerException if {@code ecKeyCreateOptions} is {@code null}. * @throws ResourceModifiedException if {@code ecKeyCreateOptions} is malformed. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createEcKey(EcKeyCreateOptions ecKeyCreateOptions) { Objects.requireNonNull(ecKeyCreateOptions, "The Ec key options options cannot be null."); KeyRequestParameters parameters = new KeyRequestParameters() .kty(ecKeyCreateOptions.keyType()) .curve(ecKeyCreateOptions.curve()) .keyOps(ecKeyCreateOptions.keyOperations()) .keyAttributes(new KeyRequestAttributes(ecKeyCreateOptions)); return service.createKey(endpoint, ecKeyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating Ec key - {}", ecKeyCreateOptions.name())) .doOnSuccess(response -> logger.info("Created Ec key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create Ec key - {}", ecKeyCreateOptions.name(), error)); } /** * Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type * into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission. * * <p><strong>Code Samples</strong></p> * <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details * when a response has been received.</p> * <pre> * keyAsyncClient.importKey("keyName", jsonWebKeyToImport).subscribe(keyResponse -&gt; * System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param name The name for the imported key. * @param keyMaterial The Json web key being imported. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> importKey(String name, JsonWebKey keyMaterial) { KeyImportRequestParameters parameters = new KeyImportRequestParameters().key(keyMaterial); return service.importKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Importing key - {}", name)) .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to import key - {}", name, error)); } /** * Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type * into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission. * * <p>The {@code keyImportOptions} is required and its fields {@link KeyImportOptions * be null. The {@link KeyImportOptions * are optional. If not specified, no values are set for the fields. The {@link KeyImportOptions * the {@link KeyImportOptions * * <p><strong>Code Samples</strong></p> * <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details * when a response has been received.</p> * <pre> * KeyImportOptions keyImportOptions = new KeyImportOptions("keyName", jsonWebKeyToImport) * .hsm(true) * .expires(OffsetDateTime.now().plusDays(60)); * * keyAsyncClient.importKey(keyImportOptions).subscribe(keyResponse -&gt; * System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param keyImportOptions The key import configuration object containing information about the json web key being imported. * @throws NullPointerException if {@code keyImportOptions} is {@code null}. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> importKey(KeyImportOptions keyImportOptions) { Objects.requireNonNull(keyImportOptions, "The key import configuration parameter cannot be null."); KeyImportRequestParameters parameters = new KeyImportRequestParameters() .key(keyImportOptions.keyMaterial()) .hsm(keyImportOptions.hsm()) .keyAttributes(new KeyRequestAttributes(keyImportOptions)); return service.importKey(endpoint, keyImportOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Importing key - {}", keyImportOptions.name())) .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to import key - {}", keyImportOptions.name(), error)); } /** * Gets the public part of the specified key and key version. The get key operation is applicable to all key types and it requires the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets a specific version of the key in the key vault. Subscribes to the call asynchronously and prints out the * returned key details when a response has been received.</p> * <pre> * String keyVersion = "6A385B124DEF4096AF1361A85B16C204"; * keyAsyncClient.getKey("keyName", keyVersion).subscribe(keyResponse -&gt; * System.out.printf("Key returned with name %s, id %s and version %s", keyResponse.value().name(), * keyResponse.value().id(), keyResponse.value().version())); * </pre> * * @param name The name of the key, cannot be null * @param version The version of the key to retrieve. If this is an empty String or null, this call is equivalent to calling {@link KeyAsyncClient * @throws ResourceNotFoundException when a key with {@code name} and {@code version} doesn't exist in the key vault. * @throws HttpRequestException if {@code name} or {@code version} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> getKey(String name, String version) { String keyVersion = ""; if (version != null) { keyVersion = version; } return service.getKey(endpoint, name, keyVersion, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Retrieving key - {}", name)) .doOnSuccess(response -> logger.info("Retrieved key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to get key - {}", name, error)); } /** * Get the public part of the latest version of the specified key from the key vault. The get key operation is applicable to * all key types and it requires the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets latest version of the key in the key vault. Subscribes to the call asynchronously and prints out the * returned key details when a response has been received.</p> * <pre> * keyAsyncClient.getKey("keyName").subscribe(keyResponse -&gt; * System.out.printf("Key with name %s, id %s \n", keyResponse.value().name(), * keyResponse.value().id())); * </pre> * * @param name The name of the key. * @throws ResourceNotFoundException when a key with {@code name} doesn't exist in the key vault. * @throws HttpRequestException if {@code name} is empty string. * @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> getKey(String name) { return getKey(name, "") .doOnRequest(ignored -> logger.info("Retrieving key - {}", name)) .doOnSuccess(response -> logger.info("Retrieved key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to get key - {}", name, error)); } /** * Get public part of the key which represents {@link KeyBase keyBase} from the key vault. The get key operation is applicable * to all key types and it requires the {@code keys/get} permission. 
* * <p>The list operations {@link KeyAsyncClient * the {@link Flux} containing {@link KeyBase base key} as output excluding the key material of the key. * This operation can then be used to get the full key with its key material from {@code keyBase}.</p> * <pre> * keyAsyncClient.listKeys().subscribe(keyBase -&gt; * client.getKey(keyBase).subscribe(keyResponse -&gt; * System.out.printf("Key with name %s and value %s \n", keyResponse.value().name(), keyResponse.value().id()))); * </pre> * * @param keyBase The {@link KeyBase base key} holding attributes of the key being requested. * @throws ResourceNotFoundException when a key with {@link KeyBase * @throws HttpRequestException if {@link KeyBase
class KeyAsyncClient { static final String API_VERSION = "7.0"; static final String ACCEPT_LANGUAGE = "en-US"; static final int DEFAULT_MAX_PAGE_RESULTS = 25; static final String CONTENT_TYPE_HEADER_VALUE = "application/json"; static final String KEY_VAULT_SCOPE = "https: private final String endpoint; private final KeyService service; private final ClientLogger logger = new ClientLogger(KeyAsyncClient.class); /** * Creates a KeyAsyncClient that uses {@code pipeline} to service requests * * @param endpoint URL for the Azure KeyVault service. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. */ KeyAsyncClient(URL endpoint, HttpPipeline pipeline) { Objects.requireNonNull(endpoint, KeyVaultErrorCodeStrings.getErrorString(KeyVaultErrorCodeStrings.VAULT_END_POINT_REQUIRED)); this.endpoint = endpoint.toString(); this.service = RestProxy.create(KeyService.class, pipeline); } /** * Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in * key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link KeyType keyType} indicates the type of key to create. Possible values include: {@link KeyType * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new EC key. Subscribes to the call asynchronously and prints out the newly created key details when a response has been received.</p> * <pre> * keyAsyncClient.createKey("keyName", KeyType.EC).subscribe(keyResponse -&gt; * System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param name The name of the key being created. * @param keyType The type of key to create. For valid values, see {@link KeyType KeyType}. * @throws ResourceModifiedException if {@code name} or {@code keyType} is null. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createKey(String name, KeyType keyType) { KeyRequestParameters parameters = new KeyRequestParameters().kty(keyType); return service.createKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating key - {}", name)) .doOnSuccess(response -> logger.info("Created key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create key - {}", name, error)); } /** * Creates a new key and stores it in the key vault. The create key operation can be used to create any key type in * key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link KeyCreateOptions} is required. The {@link KeyCreateOptions * are optional. The {@link KeyCreateOptions * * <p>The {@link KeyCreateOptions * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new Rsa key which activates in one day and expires in one year. Subscribes to the call asynchronously * and prints out the newly created key details when a response has been received.</p> * <pre> * KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA) * .notBefore(OffsetDateTime.now().plusDays(1)) * .expires(OffsetDateTime.now().plusYears(1)); * * keyAsyncClient.createKey(keyCreateOptions).subscribe(keyResponse -&gt; * System.out.printf("Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param keyCreateOptions The key configuration object containing information about the key being created. * @throws NullPointerException if {@code keyCreateOptions} is {@code null}. * @throws ResourceModifiedException if {@code keyCreateOptions} is malformed. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createKey(KeyCreateOptions keyCreateOptions) { Objects.requireNonNull(keyCreateOptions, "The key options parameter cannot be null."); KeyRequestParameters parameters = new KeyRequestParameters() .kty(keyCreateOptions.keyType()) .keyOps(keyCreateOptions.keyOperations()) .keyAttributes(new KeyRequestAttributes(keyCreateOptions)); return service.createKey(endpoint, keyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating key - {}", keyCreateOptions.name())) .doOnSuccess(response -> logger.info("Created key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create key - {}", keyCreateOptions.name(), error)); } /** * Creates a new Rsa key and stores it in the key vault. The create Rsa key operation can be used to create any Rsa key type in * key vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link RsaKeyCreateOptions} is required. The {@link RsaKeyCreateOptions * and {@link RsaKeyCreateOptions * is set to true by Azure Key Vault, if not specified.</p> * * <p>The {@link RsaKeyCreateOptions * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new RSA key with size 2048 which activates in one day and expires in one year. 
Subscribes to the call asynchronously * and prints out the newly created key details when a response has been received.</p> * <pre> * RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName", KeyType.RSA) * .keySize(2048) * .notBefore(OffsetDateTime.now().plusDays(1)) * .expires(OffsetDateTime.now().plusYears(1)); * * keyAsyncClient.createRsaKey(rsaKeyCreateOptions).subscribe(keyResponse -&gt; * System.out.printf("RSA Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param rsaKeyCreateOptions The key configuration object containing information about the rsa key being created. * @throws NullPointerException if {@code rsaKeyCreateOptions} is {@code null}. * @throws ResourceModifiedException if {@code rsaKeyCreateOptions} is malformed. * @throws HttpRequestException if {@code name} is empty string. * @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createRsaKey(RsaKeyCreateOptions rsaKeyCreateOptions) { Objects.requireNonNull(rsaKeyCreateOptions, "The Rsa key options parameter cannot be null."); KeyRequestParameters parameters = new KeyRequestParameters() .kty(rsaKeyCreateOptions.keyType()) .keySize(rsaKeyCreateOptions.keySize()) .keyOps(rsaKeyCreateOptions.keyOperations()) .keyAttributes(new KeyRequestAttributes(rsaKeyCreateOptions)); return service.createKey(endpoint, rsaKeyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating Rsa key - {}", rsaKeyCreateOptions.name())) .doOnSuccess(response -> logger.info("Created Rsa key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create Rsa key - {}", rsaKeyCreateOptions.name(), error)); } /** * Creates a new Ec key and stores it in the key vault. The create Ec key operation can be used to create any Ec key type in * key vault. 
If the named key already exists, Azure Key Vault creates a new version of the key. It requires the {@code keys/create} permission. * * <p>The {@link EcKeyCreateOptions} parameter is required. The {@link EcKeyCreateOptions * default value of {@link KeyCurveName * are optional. The {@link EcKeyCreateOptions * * <p>The {@link EcKeyCreateOptions * {@link KeyType * * <p><strong>Code Samples</strong></p> * <p>Creates a new EC key with P-384 web key curve. The key activates in one day and expires in one year. Subscribes to the call asynchronously * and prints out the newly created ec key details when a response has been received.</p> * <pre> * EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName", KeyType.EC) * .curve(KeyCurveName.P_384) * .notBefore(OffsetDateTime.now().plusDays(1)) * .expires(OffsetDateTime.now().plusYears(1)); * * keyAsyncClient.createEcKey(ecKeyCreateOptions).subscribe(keyResponse -&gt; * System.out.printf("EC Key is created with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param ecKeyCreateOptions The key options object containing information about the ec key being created. * @throws NullPointerException if {@code ecKeyCreateOptions} is {@code null}. * @throws ResourceModifiedException if {@code ecKeyCreateOptions} is malformed. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> createEcKey(EcKeyCreateOptions ecKeyCreateOptions) { Objects.requireNonNull(ecKeyCreateOptions, "The Ec key options options cannot be null."); KeyRequestParameters parameters = new KeyRequestParameters() .kty(ecKeyCreateOptions.keyType()) .curve(ecKeyCreateOptions.curve()) .keyOps(ecKeyCreateOptions.keyOperations()) .keyAttributes(new KeyRequestAttributes(ecKeyCreateOptions)); return service.createKey(endpoint, ecKeyCreateOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Creating Ec key - {}", ecKeyCreateOptions.name())) .doOnSuccess(response -> logger.info("Created Ec key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to create Ec key - {}", ecKeyCreateOptions.name(), error)); } /** * Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type * into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission. * * <p><strong>Code Samples</strong></p> * <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details * when a response has been received.</p> * <pre> * keyAsyncClient.importKey("keyName", jsonWebKeyToImport).subscribe(keyResponse -&gt; * System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param name The name for the imported key. * @param keyMaterial The Json web key being imported. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> importKey(String name, JsonWebKey keyMaterial) { KeyImportRequestParameters parameters = new KeyImportRequestParameters().key(keyMaterial); return service.importKey(endpoint, name, API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Importing key - {}", name)) .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to import key - {}", name, error)); } /** * Imports an externally created key and stores it in key vault. The import key operation may be used to import any key type * into the Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the {@code keys/import} permission. * * <p>The {@code keyImportOptions} is required and its fields {@link KeyImportOptions * be null. The {@link KeyImportOptions * are optional. If not specified, no values are set for the fields. The {@link KeyImportOptions * the {@link KeyImportOptions * * <p><strong>Code Samples</strong></p> * <p>Imports a new key into key vault. Subscribes to the call asynchronously and prints out the newly imported key details * when a response has been received.</p> * <pre> * KeyImportOptions keyImportOptions = new KeyImportOptions("keyName", jsonWebKeyToImport) * .hsm(true) * .expires(OffsetDateTime.now().plusDays(60)); * * keyAsyncClient.importKey(keyImportOptions).subscribe(keyResponse -&gt; * System.out.printf("Key is imported with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id())); * </pre> * * @param keyImportOptions The key import configuration object containing information about the json web key being imported. * @throws NullPointerException if {@code keyImportOptions} is {@code null}. * @throws HttpRequestException if {@code name} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> importKey(KeyImportOptions keyImportOptions) { Objects.requireNonNull(keyImportOptions, "The key import configuration parameter cannot be null."); KeyImportRequestParameters parameters = new KeyImportRequestParameters() .key(keyImportOptions.keyMaterial()) .hsm(keyImportOptions.hsm()) .keyAttributes(new KeyRequestAttributes(keyImportOptions)); return service.importKey(endpoint, keyImportOptions.name(), API_VERSION, ACCEPT_LANGUAGE, parameters, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Importing key - {}", keyImportOptions.name())) .doOnSuccess(response -> logger.info("Imported key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to import key - {}", keyImportOptions.name(), error)); } /** * Gets the public part of the specified key and key version. The get key operation is applicable to all key types and it requires the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets a specific version of the key in the key vault. Subscribes to the call asynchronously and prints out the * returned key details when a response has been received.</p> * <pre> * String keyVersion = "6A385B124DEF4096AF1361A85B16C204"; * keyAsyncClient.getKey("keyName", keyVersion).subscribe(keyResponse -&gt; * System.out.printf("Key returned with name %s, id %s and version %s", keyResponse.value().name(), * keyResponse.value().id(), keyResponse.value().version())); * </pre> * * @param name The name of the key, cannot be null * @param version The version of the key to retrieve. If this is an empty String or null, this call is equivalent to calling {@link KeyAsyncClient * @throws ResourceNotFoundException when a key with {@code name} and {@code version} doesn't exist in the key vault. * @throws HttpRequestException if {@code name} or {@code version} is empty string. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> getKey(String name, String version) { String keyVersion = ""; if (version != null) { keyVersion = version; } return service.getKey(endpoint, name, keyVersion, API_VERSION, ACCEPT_LANGUAGE, CONTENT_TYPE_HEADER_VALUE) .doOnRequest(ignored -> logger.info("Retrieving key - {}", name)) .doOnSuccess(response -> logger.info("Retrieved key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to get key - {}", name, error)); } /** * Get the public part of the latest version of the specified key from the key vault. The get key operation is applicable to * all key types and it requires the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets latest version of the key in the key vault. Subscribes to the call asynchronously and prints out the * returned key details when a response has been received.</p> * <pre> * keyAsyncClient.getKey("keyName").subscribe(keyResponse -&gt; * System.out.printf("Key with name %s, id %s \n", keyResponse.value().name(), * keyResponse.value().id())); * </pre> * * @param name The name of the key. * @throws ResourceNotFoundException when a key with {@code name} doesn't exist in the key vault. * @throws HttpRequestException if {@code name} is empty string. * @return A {@link Mono} containing a {@link Response} whose {@link Response */ public Mono<Response<Key>> getKey(String name) { return getKey(name, "") .doOnRequest(ignored -> logger.info("Retrieving key - {}", name)) .doOnSuccess(response -> logger.info("Retrieved key - {}", response.value().name())) .doOnError(error -> logger.warning("Failed to get key - {}", name, error)); } /** * Get public part of the key which represents {@link KeyBase keyBase} from the key vault. The get key operation is applicable * to all key types and it requires the {@code keys/get} permission. 
* * <p>The list operations {@link KeyAsyncClient * the {@link Flux} containing {@link KeyBase base key} as output excluding the key material of the key. * This operation can then be used to get the full key with its key material from {@code keyBase}.</p> * <pre> * keyAsyncClient.listKeys().subscribe(keyBase -&gt; * client.getKey(keyBase).subscribe(keyResponse -&gt; * System.out.printf("Key with name %s and value %s \n", keyResponse.value().name(), keyResponse.value().id()))); * </pre> * * @param keyBase The {@link KeyBase base key} holding attributes of the key being requested. * @throws ResourceNotFoundException when a key with {@link KeyBase * @throws HttpRequestException if {@link KeyBase
I am planning to do another round of cleanup to make things a bit more consistent , I will address this later during the code cleanup ;)
public void publishTelemetryLifecycleTest(HttpClient httpClient, DigitalTwinsServiceVersion serviceVersion) { DigitalTwinsAsyncClient client = getAsyncClient(httpClient, serviceVersion); String wifiModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.WIFI_MODEL_ID_PREFIX, client, randomIntegerStringGenerator); String roomWithWifiModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.ROOM_WITH_WIFI_MODEL_ID_PREFIX, client, randomIntegerStringGenerator); String roomWithWifiTwinId = UniqueIdHelper.getUniqueDigitalTwinId(TestAssetDefaults.ROOM_WITH_WIFI_TWIN_ID_PREFIX, client, randomIntegerStringGenerator); try { createModelsAndTwins(client, wifiModelId, roomWithWifiModelId, roomWithWifiTwinId); PublishTelemetryRequestOptions telemetryRequestOptions = new PublishTelemetryRequestOptions().setMessageId(testResourceNamer.randomUuid()); StepVerifier.create(client.publishTelemetryWithResponse( roomWithWifiTwinId, "{\"Telemetry1\": 5}", telemetryRequestOptions, Context.NONE)) .assertNext(createResponse -> assertThat(createResponse.getStatusCode()) .as("Publish telemetry succeeds") .isEqualTo(HttpURLConnection.HTTP_NO_CONTENT)) .verifyComplete(); PublishTelemetryRequestOptions componentTelemetryRequestOptions = new PublishTelemetryRequestOptions().setMessageId(testResourceNamer.randomUuid()); Dictionary<String, Integer> telemetryPayload = new Hashtable<>(); telemetryPayload.put("ComponentTelemetry1", 9); String telemetryStringPayload = new ObjectMapper().writeValueAsString(telemetryPayload); StepVerifier.create(client.publishComponentTelemetryWithResponse( roomWithWifiTwinId, TestAssetDefaults.WIFI_COMPONENT_NAME, telemetryStringPayload, componentTelemetryRequestOptions, Context.NONE)) .assertNext(createResponse -> assertThat(createResponse.getStatusCode()) .as("Publish telemetry succeeds") .isEqualTo(HttpURLConnection.HTTP_NO_CONTENT)) .verifyComplete(); } catch (Exception ex) { fail("Failure in executing a step in the test case", ex); } finally { try { if 
(roomWithWifiTwinId != null){ client.deleteDigitalTwin(roomWithWifiTwinId).block(); } if (roomWithWifiModelId != null){ client.deleteModel(roomWithWifiModelId).block(); } if(wifiModelId != null){ client.deleteModel(wifiModelId).block(); } } catch (Exception ex) { fail("Test cleanup failed", ex); } } }
createModelsAndTwins(client, wifiModelId, roomWithWifiModelId, roomWithWifiTwinId);
public void publishTelemetryLifecycleTest(HttpClient httpClient, DigitalTwinsServiceVersion serviceVersion) { DigitalTwinsAsyncClient client = getAsyncClient(httpClient, serviceVersion); String wifiModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.WIFI_MODEL_ID_PREFIX, client, randomIntegerStringGenerator); String roomWithWifiModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.ROOM_WITH_WIFI_MODEL_ID_PREFIX, client, randomIntegerStringGenerator); String roomWithWifiTwinId = UniqueIdHelper.getUniqueDigitalTwinId(TestAssetDefaults.ROOM_WITH_WIFI_TWIN_ID_PREFIX, client, randomIntegerStringGenerator); try { createModelsAndTwins(client, wifiModelId, roomWithWifiModelId, roomWithWifiTwinId); PublishTelemetryRequestOptions telemetryRequestOptions = new PublishTelemetryRequestOptions().setMessageId(testResourceNamer.randomUuid()); StepVerifier.create(client.publishTelemetryWithResponse( roomWithWifiTwinId, "{\"Telemetry1\": 5}", telemetryRequestOptions, Context.NONE)) .assertNext(createResponse -> assertThat(createResponse.getStatusCode()) .as("Publish telemetry succeeds") .isEqualTo(HttpURLConnection.HTTP_NO_CONTENT)) .verifyComplete(); PublishTelemetryRequestOptions componentTelemetryRequestOptions = new PublishTelemetryRequestOptions().setMessageId(testResourceNamer.randomUuid()); Dictionary<String, Integer> telemetryPayload = new Hashtable<>(); telemetryPayload.put("ComponentTelemetry1", 9); String telemetryStringPayload = new ObjectMapper().writeValueAsString(telemetryPayload); StepVerifier.create(client.publishComponentTelemetryWithResponse( roomWithWifiTwinId, TestAssetDefaults.WIFI_COMPONENT_NAME, telemetryStringPayload, componentTelemetryRequestOptions, Context.NONE)) .assertNext(createResponse -> assertThat(createResponse.getStatusCode()) .as("Publish telemetry succeeds") .isEqualTo(HttpURLConnection.HTTP_NO_CONTENT)) .verifyComplete(); } catch (Exception ex) { fail("Failure in executing a step in the test case", ex); } finally { try { if 
(roomWithWifiTwinId != null){ client.deleteDigitalTwin(roomWithWifiTwinId).block(); } if (roomWithWifiModelId != null){ client.deleteModel(roomWithWifiModelId).block(); } if(wifiModelId != null){ client.deleteModel(wifiModelId).block(); } } catch (Exception ex) { fail("Test cleanup failed", ex); } } }
class PublishTelemetryAsyncTests extends PublishTelemetryTestBase { private final ClientLogger logger = new ClientLogger(PublishTelemetryAsyncTests.class); @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.digitaltwins.core.TestHelper @Override private void createModelsAndTwins(DigitalTwinsAsyncClient asyncClient, String wifiModelId, String roomWithWifiModelId, String roomWithWifiTwinId){ String wifiModelPayload = TestAssetsHelper.getWifiModelPayload(wifiModelId); String roomWithWifiModelPayload = TestAssetsHelper.getRoomWithWifiModelPayload(roomWithWifiModelId, wifiModelId, TestAssetDefaults.WIFI_COMPONENT_NAME); StepVerifier .create(asyncClient.createModels(new ArrayList<>(Arrays.asList(wifiModelPayload, roomWithWifiModelPayload)))) .assertNext(createResponseList -> logger.info("Created {} models successfully", createResponseList.size())) .verifyComplete(); String roomWithWifiTwinPayload = TestAssetsHelper.getRoomWithWifiTwinPayload(roomWithWifiModelId, TestAssetDefaults.WIFI_COMPONENT_NAME); StepVerifier .create(asyncClient.createDigitalTwin(roomWithWifiTwinId, roomWithWifiTwinPayload)) .assertNext(createResponse -> logger.info("Created {} digitalTwin successfully", createResponse)) .verifyComplete(); } }
class PublishTelemetryAsyncTests extends PublishTelemetryTestBase { private final ClientLogger logger = new ClientLogger(PublishTelemetryAsyncTests.class); @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.digitaltwins.core.TestHelper @Override private void createModelsAndTwins(DigitalTwinsAsyncClient asyncClient, String wifiModelId, String roomWithWifiModelId, String roomWithWifiTwinId){ String wifiModelPayload = TestAssetsHelper.getWifiModelPayload(wifiModelId); String roomWithWifiModelPayload = TestAssetsHelper.getRoomWithWifiModelPayload(roomWithWifiModelId, wifiModelId, TestAssetDefaults.WIFI_COMPONENT_NAME); StepVerifier .create(asyncClient.createModels(new ArrayList<>(Arrays.asList(wifiModelPayload, roomWithWifiModelPayload)))) .assertNext(createResponseList -> logger.info("Created {} models successfully", createResponseList.size())) .verifyComplete(); String roomWithWifiTwinPayload = TestAssetsHelper.getRoomWithWifiTwinPayload(roomWithWifiModelId, TestAssetDefaults.WIFI_COMPONENT_NAME); StepVerifier .create(asyncClient.createDigitalTwin(roomWithWifiTwinId, roomWithWifiTwinPayload)) .assertNext(createResponse -> logger.info("Created {} digitalTwin successfully", createResponse)) .verifyComplete(); } }
Is this `if` check a relevant change to this bug fix? If not, could you please extract it to a separate hotfix commit in this PR to avoid confusions?
public void recycle(MemorySegment segment) { Buffer releasedFloatingBuffer = null; synchronized (bufferQueue) { try { if (inputChannel.isReleased()) { globalPool.recycleMemorySegments(Collections.singletonList(segment)); } else { releasedFloatingBuffer = bufferQueue.addExclusiveBuffer( new NetworkBuffer(segment, this), numRequiredBuffers); } } catch (Throwable t) { ExceptionUtils.rethrow(t); } finally { bufferQueue.notifyAll(); } } try { if (releasedFloatingBuffer == null) { if (!inputChannel.isReleased()) { inputChannel.notifyBufferAvailable(1); } } else { releasedFloatingBuffer.recycleBuffer(); } } catch (Throwable t) { ExceptionUtils.rethrow(t); } }
}
public void recycle(MemorySegment segment) { @Nullable Buffer releasedFloatingBuffer = null; synchronized (bufferQueue) { try { if (inputChannel.isReleased()) { globalPool.recycleMemorySegments(Collections.singletonList(segment)); return; } else { releasedFloatingBuffer = bufferQueue.addExclusiveBuffer( new NetworkBuffer(segment, this), numRequiredBuffers); } } catch (Throwable t) { ExceptionUtils.rethrow(t); } finally { bufferQueue.notifyAll(); } } if (releasedFloatingBuffer != null) { releasedFloatingBuffer.recycleBuffer(); } else { try { inputChannel.notifyBufferAvailable(1); } catch (Throwable t) { ExceptionUtils.rethrow(t); } } }
class BufferManager implements BufferListener, BufferRecycler { /** The available buffer queue wraps both exclusive and requested floating buffers. */ private final AvailableBufferQueue bufferQueue = new AvailableBufferQueue(); /** The buffer provider for requesting exclusive buffers. */ private final MemorySegmentProvider globalPool; /** The input channel to own this buffer manager. */ private final InputChannel inputChannel; /** * The tag indicates whether it is waiting for additional floating buffers from the buffer pool. */ @GuardedBy("bufferQueue") private boolean isWaitingForFloatingBuffers; /** The total number of required buffers for the respective input channel. */ @GuardedBy("bufferQueue") private int numRequiredBuffers; public BufferManager( MemorySegmentProvider globalPool, InputChannel inputChannel, int numRequiredBuffers) { this.globalPool = checkNotNull(globalPool); this.inputChannel = checkNotNull(inputChannel); checkArgument(numRequiredBuffers >= 0); this.numRequiredBuffers = numRequiredBuffers; } @Nullable Buffer requestBuffer() { synchronized (bufferQueue) { return bufferQueue.takeBuffer(); } } Buffer requestBufferBlocking() throws InterruptedException { synchronized (bufferQueue) { Buffer buffer; while ((buffer = bufferQueue.takeBuffer()) == null) { if (inputChannel.isReleased()) { throw new CancelTaskException( "Input channel [" + inputChannel.channelInfo + "] has already been released."); } if (!isWaitingForFloatingBuffers) { BufferPool bufferPool = inputChannel.inputGate.getBufferPool(); buffer = bufferPool.requestBuffer(); if (buffer == null && shouldContinueRequest(bufferPool)) { continue; } } if (buffer != null) { return buffer; } bufferQueue.wait(); } return buffer; } } private boolean shouldContinueRequest(BufferPool bufferPool) { if (bufferPool.addBufferListener(this)) { isWaitingForFloatingBuffers = true; numRequiredBuffers = 1; return false; } else if (bufferPool.isDestroyed()) { throw new CancelTaskException("Local buffer pool has 
already been released."); } else { return true; } } /** Requests exclusive buffers from the provider. */ void requestExclusiveBuffers(int numExclusiveBuffers) throws IOException { Collection<MemorySegment> segments = globalPool.requestMemorySegments(numExclusiveBuffers); checkArgument( !segments.isEmpty(), "The number of exclusive buffers per channel should be larger than 0."); synchronized (bufferQueue) { checkState( unsynchronizedGetFloatingBuffersAvailable() == 0, "Bug in buffer allocation logic: floating buffer is allocated before exclusive buffers are initialized."); for (MemorySegment segment : segments) { bufferQueue.addExclusiveBuffer( new NetworkBuffer(segment, this), numRequiredBuffers); } } } /** * Requests floating buffers from the buffer pool based on the given required amount, and * returns the actual requested amount. If the required amount is not fully satisfied, it will * register as a listener. */ int requestFloatingBuffers(int numRequired) { int numRequestedBuffers = 0; synchronized (bufferQueue) { if (inputChannel.isReleased()) { return numRequestedBuffers; } numRequiredBuffers = numRequired; while (bufferQueue.getAvailableBufferSize() < numRequiredBuffers && !isWaitingForFloatingBuffers) { BufferPool bufferPool = inputChannel.inputGate.getBufferPool(); Buffer buffer = bufferPool.requestBuffer(); if (buffer != null) { bufferQueue.addFloatingBuffer(buffer); numRequestedBuffers++; } else if (bufferPool.addBufferListener(this)) { isWaitingForFloatingBuffers = true; break; } } } return numRequestedBuffers; } /** * Exclusive buffer is recycled to this channel manager directly and it may trigger return extra * floating buffer based on <tt>numRequiredBuffers</tt>. * * @param segment The exclusive segment of this channel. */ @Override void releaseFloatingBuffers() { synchronized (bufferQueue) { numRequiredBuffers = 0; bufferQueue.releaseFloatingBuffers(); } } /** Recycles all the exclusive and floating buffers from the given buffer queue. 
*/ void releaseAllBuffers(ArrayDeque<Buffer> buffers) throws IOException { final List<MemorySegment> exclusiveRecyclingSegments = new ArrayList<>(); Exception err = null; Buffer buffer; while ((buffer = buffers.poll()) != null) { try { if (buffer.getRecycler() == BufferManager.this) { exclusiveRecyclingSegments.add(buffer.getMemorySegment()); } else { buffer.recycleBuffer(); } } catch (Exception e) { err = firstOrSuppressed(e, err); } } try { synchronized (bufferQueue) { bufferQueue.releaseAll(exclusiveRecyclingSegments); bufferQueue.notifyAll(); } } catch (Exception e) { err = firstOrSuppressed(e, err); } try { if (exclusiveRecyclingSegments.size() > 0) { globalPool.recycleMemorySegments(exclusiveRecyclingSegments); } } catch (Exception e) { err = firstOrSuppressed(e, err); } if (err != null) { throw err instanceof IOException ? (IOException) err : new IOException(err); } } /** * The buffer pool notifies this listener of an available floating buffer. If the listener is * released or currently does not need extra buffers, the buffer should be returned to the * buffer pool. Otherwise, the buffer will be added into the <tt>bufferQueue</tt>. * * @param buffer Buffer that becomes available in buffer pool. * @return NotificationResult indicates whether this channel accepts the buffer and is waiting * for more floating buffers. 
*/ @Override public BufferListener.NotificationResult notifyBufferAvailable(Buffer buffer) { BufferListener.NotificationResult notificationResult = BufferListener.NotificationResult.BUFFER_NOT_USED; if (inputChannel.isReleased()) { return notificationResult; } try { synchronized (bufferQueue) { checkState( isWaitingForFloatingBuffers, "This channel should be waiting for floating buffers."); if (inputChannel.isReleased() || bufferQueue.getAvailableBufferSize() >= numRequiredBuffers) { isWaitingForFloatingBuffers = false; return notificationResult; } bufferQueue.addFloatingBuffer(buffer); bufferQueue.notifyAll(); if (bufferQueue.getAvailableBufferSize() == numRequiredBuffers) { isWaitingForFloatingBuffers = false; notificationResult = BufferListener.NotificationResult.BUFFER_USED_NO_NEED_MORE; } else { notificationResult = BufferListener.NotificationResult.BUFFER_USED_NEED_MORE; } } if (notificationResult != NotificationResult.BUFFER_NOT_USED) { inputChannel.notifyBufferAvailable(1); } } catch (Throwable t) { inputChannel.setError(t); } return notificationResult; } @Override public void notifyBufferDestroyed() { } @VisibleForTesting int unsynchronizedGetNumberOfRequiredBuffers() { return numRequiredBuffers; } @VisibleForTesting boolean unsynchronizedIsWaitingForFloatingBuffers() { return isWaitingForFloatingBuffers; } @VisibleForTesting int getNumberOfAvailableBuffers() { synchronized (bufferQueue) { return bufferQueue.getAvailableBufferSize(); } } int unsynchronizedGetAvailableExclusiveBuffers() { return bufferQueue.exclusiveBuffers.size(); } int unsynchronizedGetFloatingBuffersAvailable() { return bufferQueue.floatingBuffers.size(); } /** * Manages the exclusive and floating buffers of this channel, and handles the internal buffer * related logic. */ static final class AvailableBufferQueue { /** The current available floating buffers from the fixed buffer pool. 
*/ final ArrayDeque<Buffer> floatingBuffers; /** The current available exclusive buffers from the global buffer pool. */ final ArrayDeque<Buffer> exclusiveBuffers; AvailableBufferQueue() { this.exclusiveBuffers = new ArrayDeque<>(); this.floatingBuffers = new ArrayDeque<>(); } /** * Adds an exclusive buffer (back) into the queue and releases one floating buffer if the * number of available buffers in queue is more than the required amount. * * @param buffer The exclusive buffer to add * @param numRequiredBuffers The number of required buffers * @return An released floating buffer, may be null if the numRequiredBuffers is not met. */ @Nullable Buffer addExclusiveBuffer(Buffer buffer, int numRequiredBuffers) { exclusiveBuffers.add(buffer); if (getAvailableBufferSize() > numRequiredBuffers) { return floatingBuffers.poll(); } return null; } void addFloatingBuffer(Buffer buffer) { floatingBuffers.add(buffer); } /** * Takes the floating buffer first in order to make full use of floating buffers reasonably. * * @return An available floating or exclusive buffer, may be null if the channel is * released. */ @Nullable Buffer takeBuffer() { if (floatingBuffers.size() > 0) { return floatingBuffers.poll(); } else { return exclusiveBuffers.poll(); } } /** * The floating buffer is recycled to local buffer pool directly, and the exclusive buffer * will be gathered to return to global buffer pool later. * * @param exclusiveSegments The list that we will add exclusive segments into. */ void releaseAll(List<MemorySegment> exclusiveSegments) { Buffer buffer; while ((buffer = floatingBuffers.poll()) != null) { buffer.recycleBuffer(); } while ((buffer = exclusiveBuffers.poll()) != null) { exclusiveSegments.add(buffer.getMemorySegment()); } } void releaseFloatingBuffers() { Buffer buffer; while ((buffer = floatingBuffers.poll()) != null) { buffer.recycleBuffer(); } } int getAvailableBufferSize() { return floatingBuffers.size() + exclusiveBuffers.size(); } } }
class BufferManager implements BufferListener, BufferRecycler { /** The available buffer queue wraps both exclusive and requested floating buffers. */ private final AvailableBufferQueue bufferQueue = new AvailableBufferQueue(); /** The buffer provider for requesting exclusive buffers. */ private final MemorySegmentProvider globalPool; /** The input channel to own this buffer manager. */ private final InputChannel inputChannel; /** * The tag indicates whether it is waiting for additional floating buffers from the buffer pool. */ @GuardedBy("bufferQueue") private boolean isWaitingForFloatingBuffers; /** The total number of required buffers for the respective input channel. */ @GuardedBy("bufferQueue") private int numRequiredBuffers; public BufferManager( MemorySegmentProvider globalPool, InputChannel inputChannel, int numRequiredBuffers) { this.globalPool = checkNotNull(globalPool); this.inputChannel = checkNotNull(inputChannel); checkArgument(numRequiredBuffers >= 0); this.numRequiredBuffers = numRequiredBuffers; } @Nullable Buffer requestBuffer() { synchronized (bufferQueue) { return bufferQueue.takeBuffer(); } } Buffer requestBufferBlocking() throws InterruptedException { synchronized (bufferQueue) { Buffer buffer; while ((buffer = bufferQueue.takeBuffer()) == null) { if (inputChannel.isReleased()) { throw new CancelTaskException( "Input channel [" + inputChannel.channelInfo + "] has already been released."); } if (!isWaitingForFloatingBuffers) { BufferPool bufferPool = inputChannel.inputGate.getBufferPool(); buffer = bufferPool.requestBuffer(); if (buffer == null && shouldContinueRequest(bufferPool)) { continue; } } if (buffer != null) { return buffer; } bufferQueue.wait(); } return buffer; } } private boolean shouldContinueRequest(BufferPool bufferPool) { if (bufferPool.addBufferListener(this)) { isWaitingForFloatingBuffers = true; numRequiredBuffers = 1; return false; } else if (bufferPool.isDestroyed()) { throw new CancelTaskException("Local buffer pool has 
already been released."); } else { return true; } } /** Requests exclusive buffers from the provider. */ void requestExclusiveBuffers(int numExclusiveBuffers) throws IOException { Collection<MemorySegment> segments = globalPool.requestMemorySegments(numExclusiveBuffers); checkArgument( !segments.isEmpty(), "The number of exclusive buffers per channel should be larger than 0."); synchronized (bufferQueue) { checkState( unsynchronizedGetFloatingBuffersAvailable() == 0, "Bug in buffer allocation logic: floating buffer is allocated before exclusive buffers are initialized."); for (MemorySegment segment : segments) { bufferQueue.addExclusiveBuffer( new NetworkBuffer(segment, this), numRequiredBuffers); } } } /** * Requests floating buffers from the buffer pool based on the given required amount, and * returns the actual requested amount. If the required amount is not fully satisfied, it will * register as a listener. */ int requestFloatingBuffers(int numRequired) { int numRequestedBuffers = 0; synchronized (bufferQueue) { if (inputChannel.isReleased()) { return numRequestedBuffers; } numRequiredBuffers = numRequired; while (bufferQueue.getAvailableBufferSize() < numRequiredBuffers && !isWaitingForFloatingBuffers) { BufferPool bufferPool = inputChannel.inputGate.getBufferPool(); Buffer buffer = bufferPool.requestBuffer(); if (buffer != null) { bufferQueue.addFloatingBuffer(buffer); numRequestedBuffers++; } else if (bufferPool.addBufferListener(this)) { isWaitingForFloatingBuffers = true; break; } } } return numRequestedBuffers; } /** * Exclusive buffer is recycled to this channel manager directly and it may trigger return extra * floating buffer based on <tt>numRequiredBuffers</tt>. * * @param segment The exclusive segment of this channel. */ @Override void releaseFloatingBuffers() { synchronized (bufferQueue) { numRequiredBuffers = 0; bufferQueue.releaseFloatingBuffers(); } } /** Recycles all the exclusive and floating buffers from the given buffer queue. 
*/ void releaseAllBuffers(ArrayDeque<Buffer> buffers) throws IOException { final List<MemorySegment> exclusiveRecyclingSegments = new ArrayList<>(); Exception err = null; Buffer buffer; while ((buffer = buffers.poll()) != null) { try { if (buffer.getRecycler() == BufferManager.this) { exclusiveRecyclingSegments.add(buffer.getMemorySegment()); } else { buffer.recycleBuffer(); } } catch (Exception e) { err = firstOrSuppressed(e, err); } } try { synchronized (bufferQueue) { bufferQueue.releaseAll(exclusiveRecyclingSegments); bufferQueue.notifyAll(); } } catch (Exception e) { err = firstOrSuppressed(e, err); } try { if (exclusiveRecyclingSegments.size() > 0) { globalPool.recycleMemorySegments(exclusiveRecyclingSegments); } } catch (Exception e) { err = firstOrSuppressed(e, err); } if (err != null) { throw err instanceof IOException ? (IOException) err : new IOException(err); } } /** * The buffer pool notifies this listener of an available floating buffer. If the listener is * released or currently does not need extra buffers, the buffer should be returned to the * buffer pool. Otherwise, the buffer will be added into the <tt>bufferQueue</tt>. * * @param buffer Buffer that becomes available in buffer pool. * @return NotificationResult indicates whether this channel accepts the buffer and is waiting * for more floating buffers. 
*/ @Override public BufferListener.NotificationResult notifyBufferAvailable(Buffer buffer) { BufferListener.NotificationResult notificationResult = BufferListener.NotificationResult.BUFFER_NOT_USED; if (inputChannel.isReleased()) { return notificationResult; } try { synchronized (bufferQueue) { checkState( isWaitingForFloatingBuffers, "This channel should be waiting for floating buffers."); if (inputChannel.isReleased() || bufferQueue.getAvailableBufferSize() >= numRequiredBuffers) { isWaitingForFloatingBuffers = false; return notificationResult; } bufferQueue.addFloatingBuffer(buffer); bufferQueue.notifyAll(); if (bufferQueue.getAvailableBufferSize() == numRequiredBuffers) { isWaitingForFloatingBuffers = false; notificationResult = BufferListener.NotificationResult.BUFFER_USED_NO_NEED_MORE; } else { notificationResult = BufferListener.NotificationResult.BUFFER_USED_NEED_MORE; } } if (notificationResult != NotificationResult.BUFFER_NOT_USED) { inputChannel.notifyBufferAvailable(1); } } catch (Throwable t) { inputChannel.setError(t); } return notificationResult; } @Override public void notifyBufferDestroyed() { } @VisibleForTesting int unsynchronizedGetNumberOfRequiredBuffers() { return numRequiredBuffers; } @VisibleForTesting boolean unsynchronizedIsWaitingForFloatingBuffers() { return isWaitingForFloatingBuffers; } @VisibleForTesting int getNumberOfAvailableBuffers() { synchronized (bufferQueue) { return bufferQueue.getAvailableBufferSize(); } } int unsynchronizedGetAvailableExclusiveBuffers() { return bufferQueue.exclusiveBuffers.size(); } int unsynchronizedGetFloatingBuffersAvailable() { return bufferQueue.floatingBuffers.size(); } /** * Manages the exclusive and floating buffers of this channel, and handles the internal buffer * related logic. */ static final class AvailableBufferQueue { /** The current available floating buffers from the fixed buffer pool. 
*/ final ArrayDeque<Buffer> floatingBuffers; /** The current available exclusive buffers from the global buffer pool. */ final ArrayDeque<Buffer> exclusiveBuffers; AvailableBufferQueue() { this.exclusiveBuffers = new ArrayDeque<>(); this.floatingBuffers = new ArrayDeque<>(); } /** * Adds an exclusive buffer (back) into the queue and releases one floating buffer if the * number of available buffers in queue is more than the required amount. If floating buffer * is released, the total amount of available buffers after adding this exclusive buffer has * not changed, and no new buffers are available. The caller is responsible for recycling * the release/returned floating buffer. * * @param buffer The exclusive buffer to add * @param numRequiredBuffers The number of required buffers * @return An released floating buffer, may be null if the numRequiredBuffers is not met. */ @Nullable Buffer addExclusiveBuffer(Buffer buffer, int numRequiredBuffers) { exclusiveBuffers.add(buffer); if (getAvailableBufferSize() > numRequiredBuffers) { return floatingBuffers.poll(); } return null; } void addFloatingBuffer(Buffer buffer) { floatingBuffers.add(buffer); } /** * Takes the floating buffer first in order to make full use of floating buffers reasonably. * * @return An available floating or exclusive buffer, may be null if the channel is * released. */ @Nullable Buffer takeBuffer() { if (floatingBuffers.size() > 0) { return floatingBuffers.poll(); } else { return exclusiveBuffers.poll(); } } /** * The floating buffer is recycled to local buffer pool directly, and the exclusive buffer * will be gathered to return to global buffer pool later. * * @param exclusiveSegments The list that we will add exclusive segments into. 
*/ void releaseAll(List<MemorySegment> exclusiveSegments) { Buffer buffer; while ((buffer = floatingBuffers.poll()) != null) { buffer.recycleBuffer(); } while ((buffer = exclusiveBuffers.poll()) != null) { exclusiveSegments.add(buffer.getMemorySegment()); } } void releaseFloatingBuffers() { Buffer buffer; while ((buffer = floatingBuffers.poll()) != null) { buffer.recycleBuffer(); } } int getAvailableBufferSize() { return floatingBuffers.size() + exclusiveBuffers.size(); } } }
This should be performed after closing the output stream
private static void runCompressionGzip(File oldFile) { File gzippedFile = new File(oldFile.getPath() + ".gz"); try (GZIPOutputStream compressor = new GZIPOutputStream(new FileOutputStream(gzippedFile), 0x100000); FileInputStream inputStream = new FileInputStream(oldFile)) { byte[] buffer = new byte[0x400000]; long totalBytesRead = 0; NativeIO nativeIO = new NativeIO(); for (int read = inputStream.read(buffer); read > 0; read = inputStream.read(buffer)) { compressor.write(buffer, 0, read); nativeIO.dropPartialFileFromCache(inputStream.getFD(), totalBytesRead, read, false); totalBytesRead += read; } compressor.finish(); compressor.flush(); oldFile.delete(); nativeIO.dropFileFromCache(gzippedFile); } catch (IOException e) { logger.warning("Got '" + e + "' while compressing '" + oldFile.getPath() + "'."); } }
nativeIO.dropFileFromCache(gzippedFile);
private static void runCompressionGzip(File oldFile) { File gzippedFile = new File(oldFile.getPath() + ".gz"); NativeIO nativeIO = new NativeIO(); try (GZIPOutputStream compressor = new GZIPOutputStream(new FileOutputStream(gzippedFile), 0x100000); FileInputStream inputStream = new FileInputStream(oldFile)) { byte[] buffer = new byte[0x400000]; long totalBytesRead = 0; for (int read = inputStream.read(buffer); read > 0; read = inputStream.read(buffer)) { compressor.write(buffer, 0, read); nativeIO.dropPartialFileFromCache(inputStream.getFD(), totalBytesRead, read, false); totalBytesRead += read; } compressor.finish(); compressor.flush(); } catch (IOException e) { logger.warning("Got '" + e + "' while compressing '" + oldFile.getPath() + "'."); } oldFile.delete(); nativeIO.dropFileFromCache(gzippedFile); }
class LogThread<LOGTYPE> extends Thread { long lastFlush = 0; private FileOutputStream currentOutputStream = null; private long nextRotationTime = 0; private final String filePattern; private String fileName; private long lastDropPosition = 0; private final LogWriter<LOGTYPE> logWriter; private final ArrayBlockingQueue<Operation<LOGTYPE>> logQueue = new ArrayBlockingQueue<>(100000); private final Compression compression; private final long[] rotationTimes; private final String symlinkName; private final ExecutorService executor = Executors.newCachedThreadPool(ThreadFactoryFactory.getDaemonThreadFactory("logfilehandler.compression")); private final NativeIO nativeIO = new NativeIO(); LogThread(LogWriter<LOGTYPE> logWriter, String filePattern, Compression compression, long[] rotationTimes, String symlinkName) { super("Logger"); setDaemon(true); this.logWriter = logWriter; this.filePattern = filePattern; this.compression = compression; this.rotationTimes = rotationTimes; this.symlinkName = (symlinkName != null && !symlinkName.isBlank()) ? 
symlinkName : null; } @Override public void run() { try { storeLogRecords(); } catch (InterruptedException e) { } catch (Exception e) { com.yahoo.protect.Process.logAndDie("Failed storing log records", e); } internalFlush(); } private void storeLogRecords() throws InterruptedException { while (!isInterrupted()) { Operation<LOGTYPE> r = logQueue.poll(100, TimeUnit.MILLISECONDS); if (r != null) { if (r.type == Operation.Type.flush) { internalFlush(); } else if (r.type == Operation.Type.close) { internalClose(); } else if (r.type == Operation.Type.rotate) { internalRotateNow(); lastFlush = System.nanoTime(); } else if (r.type == Operation.Type.log) { internalPublish(r.log.get()); flushIfOld(3, TimeUnit.SECONDS); } r.countDownLatch.countDown(); } else { flushIfOld(100, TimeUnit.MILLISECONDS); } } } private void flushIfOld(long age, TimeUnit unit) { long now = System.nanoTime(); if (TimeUnit.NANOSECONDS.toMillis(now - lastFlush) > unit.toMillis(age)) { internalFlush(); lastFlush = now; } } private synchronized void internalFlush() { try { FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) { if (compression == Compression.GZIP) { long newPos = currentOut.getChannel().position(); if (newPos > lastDropPosition + 102400) { nativeIO.dropPartialFileFromCache(currentOut.getFD(), lastDropPosition, newPos, true); lastDropPosition = newPos; } } else { currentOut.flush(); } } } catch (IOException e) { logger.warning("Failed dropping from cache : " + Exceptions.toMessageString(e)); } } private void internalClose() { try { internalFlush(); FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) currentOut.close(); } catch (Exception e) { logger.log(Level.WARNING, "Got error while closing log file", e); } } private void internalPublish(LOGTYPE r) { long now = System.currentTimeMillis(); if (nextRotationTime <= 0) { nextRotationTime = getNextRotationTime(now); } if (now > nextRotationTime || currentOutputStream == null) { 
internalRotateNow(); } try { FileOutputStream out = this.currentOutputStream; logWriter.write(r, out); out.write('\n'); } catch (IOException e) { logger.warning("Failed writing log record: " + Exceptions.toMessageString(e)); } } /** * Find next rotation after specified time. * * @param now the specified time; if zero, current time is used. * @return the next rotation time */ long getNextRotationTime(long now) { if (now <= 0) { now = System.currentTimeMillis(); } long nowTod = timeOfDayMillis(now); long next = 0; for (long rotationTime : rotationTimes) { if (nowTod < rotationTime) { next = rotationTime - nowTod + now; break; } } if (next == 0) { next = rotationTimes[0] + lengthOfDayMillis - nowTod + now; } return next; } private void checkAndCreateDir(String pathname) { int lastSlash = pathname.lastIndexOf("/"); if (lastSlash > -1) { String pathExcludingFilename = pathname.substring(0, lastSlash); File filepath = new File(pathExcludingFilename); if (!filepath.exists()) { filepath.mkdirs(); } } } private void internalRotateNow() { String oldFileName = fileName; long now = System.currentTimeMillis(); fileName = LogFormatter.insertDate(filePattern, now); internalFlush(); try { checkAndCreateDir(fileName); FileOutputStream os = new FileOutputStream(fileName, true); currentOutputStream = os; lastDropPosition = 0; LogFileDb.nowLoggingTo(fileName); } catch (IOException e) { throw new RuntimeException("Couldn't open log file '" + fileName + "'", e); } createSymlinkToCurrentFile(); nextRotationTime = 0; if ((oldFileName != null)) { File oldFile = new File(oldFileName); if (oldFile.exists()) { if (compression != Compression.NONE) { executor.execute(() -> runCompression(oldFile, compression)); } else { nativeIO.dropFileFromCache(oldFile); } } } } private static void runCompression(File oldFile, Compression compression) { switch (compression) { case ZSTD: runCompressionZstd(oldFile.toPath()); break; case GZIP: runCompressionGzip(oldFile); break; default: throw new 
IllegalArgumentException("Unknown compression " + compression); } } private static void runCompressionZstd(Path oldFile) { try { Path compressedFile = Paths.get(oldFile.toString() + ".zst"); Files.createFile(compressedFile); int bufferSize = 0x400000; byte[] buffer = new byte[bufferSize]; try (ZstdOuputStream out = new ZstdOuputStream(Files.newOutputStream(compressedFile), bufferSize); InputStream in = Files.newInputStream(oldFile)) { int read; while ((read = in.read(buffer)) >= 0) { out.write(buffer, 0, read); } out.flush(); } Files.delete(oldFile); } catch (IOException e) { logger.log(Level.WARNING, "Failed to compress log file with zstd: " + oldFile, e); } } /** * Name files by date - create a symlink with a constant name to the newest file */ private void createSymlinkToCurrentFile() { if (symlinkName == null) return; File f = new File(fileName); File f2 = new File(f.getParent(), symlinkName); String[] cmd = new String[]{"/bin/ln", "-sf", f.getName(), f2.getPath()}; try { int retval = new ProcessExecuter().exec(cmd).getFirst(); if (retval != 0) { logger.warning("Command '" + Arrays.toString(cmd) + "' + failed with exitcode=" + retval); } } catch (IOException e) { logger.warning("Got '" + e + "' while doing'" + Arrays.toString(cmd) + "'."); } } private static final long lengthOfDayMillis = 24 * 60 * 60 * 1000; private static long timeOfDayMillis(long time) { return time % lengthOfDayMillis; } }
class LogThread<LOGTYPE> extends Thread { private final Pollable<LOGTYPE> operationProvider; long lastFlush = 0; private FileOutputStream currentOutputStream = null; private long nextRotationTime = 0; private final String filePattern; private volatile String fileName; private long lastDropPosition = 0; private final LogWriter<LOGTYPE> logWriter; private final Compression compression; private final long[] rotationTimes; private final String symlinkName; private final ExecutorService executor = Executors.newCachedThreadPool(ThreadFactoryFactory.getDaemonThreadFactory("logfilehandler.compression")); private final NativeIO nativeIO = new NativeIO(); LogThread(LogWriter<LOGTYPE> logWriter, String filePattern, Compression compression, long[] rotationTimes, String symlinkName, Pollable<LOGTYPE> operationProvider) { super("Logger"); setDaemon(true); this.logWriter = logWriter; this.filePattern = filePattern; this.compression = compression; this.rotationTimes = rotationTimes; this.symlinkName = (symlinkName != null && !symlinkName.isBlank()) ? 
symlinkName : null; this.operationProvider = operationProvider; } @Override public void run() { try { handleLogOperations(); } catch (InterruptedException e) { } catch (Exception e) { Process.logAndDie("Failed storing log records", e); } internalFlush(); } private void handleLogOperations() throws InterruptedException { while (!isInterrupted()) { Operation<LOGTYPE> r = operationProvider.poll(); if (r != null) { if (r.type == Operation.Type.flush) { internalFlush(); } else if (r.type == Operation.Type.close) { internalClose(); } else if (r.type == Operation.Type.rotate) { internalRotateNow(); lastFlush = System.nanoTime(); } else if (r.type == Operation.Type.log) { internalPublish(r.log.get()); flushIfOld(3, TimeUnit.SECONDS); } r.countDownLatch.countDown(); } else { flushIfOld(100, TimeUnit.MILLISECONDS); } } } private void flushIfOld(long age, TimeUnit unit) { long now = System.nanoTime(); if (TimeUnit.NANOSECONDS.toMillis(now - lastFlush) > unit.toMillis(age)) { internalFlush(); lastFlush = now; } } private synchronized void internalFlush() { try { FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) { if (compression == Compression.GZIP) { long newPos = currentOut.getChannel().position(); if (newPos > lastDropPosition + 102400) { nativeIO.dropPartialFileFromCache(currentOut.getFD(), lastDropPosition, newPos, true); lastDropPosition = newPos; } } else { currentOut.flush(); } } } catch (IOException e) { logger.warning("Failed dropping from cache : " + Exceptions.toMessageString(e)); } } private void internalClose() { try { internalFlush(); FileOutputStream currentOut = this.currentOutputStream; if (currentOut != null) currentOut.close(); } catch (Exception e) { logger.log(Level.WARNING, "Got error while closing log file", e); } } private void internalPublish(LOGTYPE r) { long now = System.currentTimeMillis(); if (nextRotationTime <= 0) { nextRotationTime = getNextRotationTime(now); } if (now > nextRotationTime || currentOutputStream == 
null) { internalRotateNow(); } try { FileOutputStream out = this.currentOutputStream; logWriter.write(r, out); out.write('\n'); } catch (IOException e) { logger.warning("Failed writing log record: " + Exceptions.toMessageString(e)); } } /** * Find next rotation after specified time. * * @param now the specified time; if zero, current time is used. * @return the next rotation time */ long getNextRotationTime(long now) { if (now <= 0) { now = System.currentTimeMillis(); } long nowTod = timeOfDayMillis(now); long next = 0; for (long rotationTime : rotationTimes) { if (nowTod < rotationTime) { next = rotationTime - nowTod + now; break; } } if (next == 0) { next = rotationTimes[0] + lengthOfDayMillis - nowTod + now; } return next; } private void checkAndCreateDir(String pathname) { int lastSlash = pathname.lastIndexOf("/"); if (lastSlash > -1) { String pathExcludingFilename = pathname.substring(0, lastSlash); File filepath = new File(pathExcludingFilename); if (!filepath.exists()) { filepath.mkdirs(); } } } private void internalRotateNow() { String oldFileName = fileName; long now = System.currentTimeMillis(); fileName = LogFormatter.insertDate(filePattern, now); internalFlush(); try { checkAndCreateDir(fileName); FileOutputStream os = new FileOutputStream(fileName, true); currentOutputStream = os; lastDropPosition = 0; LogFileDb.nowLoggingTo(fileName); } catch (IOException e) { throw new RuntimeException("Couldn't open log file '" + fileName + "'", e); } createSymlinkToCurrentFile(); nextRotationTime = 0; if ((oldFileName != null)) { File oldFile = new File(oldFileName); if (oldFile.exists()) { if (compression != Compression.NONE) { executor.execute(() -> runCompression(oldFile, compression)); } else { nativeIO.dropFileFromCache(oldFile); } } } } private static void runCompression(File oldFile, Compression compression) { switch (compression) { case ZSTD: runCompressionZstd(oldFile.toPath()); break; case GZIP: runCompressionGzip(oldFile); break; default: throw new 
IllegalArgumentException("Unknown compression " + compression); } } private static void runCompressionZstd(Path oldFile) { try { Path compressedFile = Paths.get(oldFile.toString() + ".zst"); Files.createFile(compressedFile); int bufferSize = 0x400000; byte[] buffer = new byte[bufferSize]; try (ZstdOuputStream out = new ZstdOuputStream(Files.newOutputStream(compressedFile), bufferSize); InputStream in = Files.newInputStream(oldFile)) { int read; while ((read = in.read(buffer)) >= 0) { out.write(buffer, 0, read); } out.flush(); } Files.delete(oldFile); } catch (IOException e) { logger.log(Level.WARNING, "Failed to compress log file with zstd: " + oldFile, e); } } /** * Name files by date - create a symlink with a constant name to the newest file */ private void createSymlinkToCurrentFile() { if (symlinkName == null) return; File f = new File(fileName); File f2 = new File(f.getParent(), symlinkName); String[] cmd = new String[]{"/bin/ln", "-sf", f.getName(), f2.getPath()}; try { int retval = new ProcessExecuter().exec(cmd).getFirst(); if (retval != 0) { logger.warning("Command '" + Arrays.toString(cmd) + "' + failed with exitcode=" + retval); } } catch (IOException e) { logger.warning("Got '" + e + "' while doing'" + Arrays.toString(cmd) + "'."); } } private static final long lengthOfDayMillis = 24 * 60 * 60 * 1000; private static long timeOfDayMillis(long time) { return time % lengthOfDayMillis; } }
Yeah, didn't want to add extra dependency :) but yes, I agree. We could use that.
private String getVersionFromPomFile() { String fileName = "pom.xml"; String versionStartTag = "<version>"; String versionEndTag = "</version>"; File file = new File(fileName); try { BufferedReader bufferedReader = new BufferedReader(new FileReader(file)); String line; while((line = bufferedReader.readLine()) != null) { if (line.contains(versionStartTag) && line.contains("azure-cosmos")) { int startIndex = line.indexOf(versionStartTag); int endIndex = line.indexOf(versionEndTag); return line.substring(startIndex + versionStartTag.length(), endIndex); } } } catch (IOException e) { throw new RuntimeException("Error reading file " + fileName, e); } return null; }
try {
private String getVersionFromPomFile() { String fileName = "pom.xml"; String versionStartTag = "<version>"; String versionEndTag = "</version>"; File file = new File(fileName); try { BufferedReader bufferedReader = new BufferedReader(new FileReader(file)); String line; while((line = bufferedReader.readLine()) != null) { if (line.contains(versionStartTag) && line.contains("azure-cosmos")) { int startIndex = line.indexOf(versionStartTag); int endIndex = line.indexOf(versionEndTag); return line.substring(startIndex + versionStartTag.length(), endIndex); } } } catch (IOException e) { throw new RuntimeException("Error reading file " + fileName, e); } return null; }
class AzureCosmosPropertiesTest { @Test(groups = "unit") public void verifyAzureCosmosProperties() { Map<String, String> properties = CoreUtils.getProperties(HttpConstants.Versions.AZURE_COSMOS_PROPERTIES_FILE_NAME); assertThat(properties).isNotNull(); assertThat(properties).isNotEmpty(); assertThat(properties.get("version")).isNotNull(); assertThat(properties.get("name")).isNotNull(); } @Test(groups = "unit") public void verifyProjectVersion() { assertThat(HttpConstants.Versions.SDK_VERSION).isNotNull(); String pomFileVersion = getVersionFromPomFile(); assertThat(HttpConstants.Versions.SDK_VERSION).isEqualTo(pomFileVersion); } }
class AzureCosmosPropertiesTest { @Test(groups = "unit") public void verifyAzureCosmosProperties() { Map<String, String> properties = CoreUtils.getProperties(HttpConstants.Versions.AZURE_COSMOS_PROPERTIES_FILE_NAME); assertThat(properties).isNotNull(); assertThat(properties).isNotEmpty(); assertThat(properties.get("version")).isNotNull(); assertThat(properties.get("name")).isNotNull(); } @Test(groups = "unit") public void verifyProjectVersion() { assertThat(HttpConstants.Versions.SDK_VERSION).isNotNull(); String pomFileVersion = getVersionFromPomFile(); assertThat(HttpConstants.Versions.SDK_VERSION).isEqualTo(pomFileVersion); } }
Are you afraid that the `System.getProperty("user.home")` is cached at build time?
public String getObjectStoreDir() { return System.getProperty("user.home") + File.separator + "ObjectStore"; }
return System.getProperty("user.home") + File.separator + "ObjectStore";
public String getObjectStoreDir() { return System.getProperty("user.home") + File.separator + "ObjectStore"; }
class ObjectStoreEnvironmentBeanSubstitution { /** * @return fixed ObjectStore path resolved during runtime */ @Substitute }
class ObjectStoreEnvironmentBeanSubstitution { /** * @return fixed ObjectStore path resolved during runtime */ @Substitute }
@IrushiL, Thank you for pointing out the issue here. Fixed the logical error.
public XMLNamespaceDeclarationNode transform(XMLNamespaceDeclarationNode xMLNamespaceDeclarationNode) { Token xmlnsKeyword = getToken(xMLNamespaceDeclarationNode.xmlnsKeyword()); ExpressionNode namespaceuri = this.modifyNode(xMLNamespaceDeclarationNode.namespaceuri()); Token asKeyword = getToken(xMLNamespaceDeclarationNode.asKeyword().orElse(null)); IdentifierToken namespacePrefix = this.modifyNode(xMLNamespaceDeclarationNode.namespacePrefix().orElse(null)); Token semicolonToken = getToken(xMLNamespaceDeclarationNode.semicolonToken()); if (asKeyword != null || namespacePrefix != null) { xMLNamespaceDeclarationNode = xMLNamespaceDeclarationNode.modify() .withNamespacePrefix(namespacePrefix) .withAsKeyword(formatToken(asKeyword, 1, 1, 0, 0)) .apply(); } return xMLNamespaceDeclarationNode.modify() .withNamespaceuri(namespaceuri) .withXmlnsKeyword(formatToken(xmlnsKeyword, 3, 1, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .apply(); }
if (asKeyword != null || namespacePrefix != null) {
public XMLNamespaceDeclarationNode transform(XMLNamespaceDeclarationNode xMLNamespaceDeclarationNode) { Token xmlnsKeyword = getToken(xMLNamespaceDeclarationNode.xmlnsKeyword()); ExpressionNode namespaceuri = this.modifyNode(xMLNamespaceDeclarationNode.namespaceuri()); Token asKeyword = getToken(xMLNamespaceDeclarationNode.asKeyword().orElse(null)); IdentifierToken namespacePrefix = this.modifyNode(xMLNamespaceDeclarationNode.namespacePrefix().orElse(null)); Token semicolonToken = getToken(xMLNamespaceDeclarationNode.semicolonToken()); int startColumn = getStartColumn(xMLNamespaceDeclarationNode, xMLNamespaceDeclarationNode.kind(), true); if (asKeyword != null) { xMLNamespaceDeclarationNode = xMLNamespaceDeclarationNode.modify() .withAsKeyword(formatToken(asKeyword, 1, 1, 0, 0)) .apply(); } if (namespacePrefix != null) { xMLNamespaceDeclarationNode = xMLNamespaceDeclarationNode.modify() .withNamespacePrefix(namespacePrefix) .apply(); } return xMLNamespaceDeclarationNode.modify() .withNamespaceuri(namespaceuri) .withXmlnsKeyword(formatToken(xmlnsKeyword, startColumn, 1, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .apply(); }
class FormattingTreeModifier extends TreeModifier { private FormattingOptions formattingOptions; private LineRange lineRange; @Override public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) { if (!isInLineRange(importDeclarationNode)) { return importDeclarationNode; } Token importKeyword = getToken(importDeclarationNode.importKeyword()); Token semicolon = getToken(importDeclarationNode.semicolon()); SeparatedNodeList<IdentifierToken> moduleNames = this.modifySeparatedNodeList( importDeclarationNode.moduleName()); ImportOrgNameNode orgName = this.modifyNode(importDeclarationNode.orgName().orElse(null)); ImportPrefixNode prefix = this.modifyNode(importDeclarationNode.prefix().orElse(null)); ImportVersionNode version = this.modifyNode(importDeclarationNode.version().orElse(null)); if (orgName != null) { importDeclarationNode = importDeclarationNode.modify() .withOrgName(orgName).apply(); } if (prefix != null) { importDeclarationNode = importDeclarationNode.modify() .withPrefix(prefix).apply(); } if (version != null) { importDeclarationNode = importDeclarationNode.modify() .withVersion(version).apply(); } return importDeclarationNode.modify() .withImportKeyword(formatToken(importKeyword, 0, 0, 0, 0)) .withModuleName(moduleNames) .withSemicolon(formatToken(semicolon, 0, 0, 0, 1)) .apply(); } @Override public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) { if (!isInLineRange(importOrgNameNode)) { return importOrgNameNode; } Token orgName = getToken(importOrgNameNode.orgName()); Token slashToken = getToken(importOrgNameNode.slashToken()); return importOrgNameNode.modify() .withOrgName(formatToken(orgName, 1, 0, 0, 0)) .withSlashToken(formatToken(slashToken, 0, 0, 0, 0)) .apply(); } @Override public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) { if (!isInLineRange(importPrefixNode)) { return importPrefixNode; } Token asKeyword = getToken(importPrefixNode.asKeyword()); Token prefix = 
getToken(importPrefixNode.prefix()); return importPrefixNode.modify() .withAsKeyword(formatToken(asKeyword, 1, 0, 0, 0)) .withPrefix(formatToken(prefix, 1, 0, 0, 0)) .apply(); } @Override public ImportVersionNode transform(ImportVersionNode importVersionNode) { if (!isInLineRange(importVersionNode)) { return importVersionNode; } Token versionKeyword = getToken(importVersionNode.versionKeyword()); SeparatedNodeList<Token> versionNumber = this.modifySeparatedNodeList(importVersionNode.versionNumber()); return importVersionNode.modify() .withVersionKeyword(formatToken(versionKeyword, 1, 1, 0, 0)) .withVersionNumber(versionNumber) .apply(); } @Override public IdentifierToken transform(IdentifierToken identifier) { if (!isInLineRange(identifier)) { return identifier; } Token identifierToken = getToken(identifier); return (IdentifierToken) formatToken(identifierToken, 0, 0, 0, 0); } @Override public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) { if (!isInLineRange(functionDefinitionNode)) { return functionDefinitionNode; } MetadataNode metadata = this.modifyNode(functionDefinitionNode.metadata().orElse(null)); NodeList<Token> qualifierList = this.modifyNodeList(functionDefinitionNode.qualifierList()); Token functionKeyword = getToken(functionDefinitionNode.functionKeyword()); Token functionName = getToken(functionDefinitionNode.functionName()); FunctionSignatureNode functionSignatureNode = this.modifyNode(functionDefinitionNode.functionSignature()); FunctionBodyNode functionBodyNode = this.modifyNode(functionDefinitionNode.functionBody()); if (metadata != null) { functionDefinitionNode = functionDefinitionNode.modify() .withMetadata(metadata).apply(); } return functionDefinitionNode.modify() .withFunctionKeyword(formatToken(functionKeyword, 0, 0, 0, 0)) .withFunctionName((IdentifierToken) formatToken(functionName, 1, 0, 0, 0)) .withFunctionSignature(functionSignatureNode) .withQualifierList(qualifierList) 
.withFunctionBody(functionBodyNode) .apply(); } @Override public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) { if (!isInLineRange(functionSignatureNode)) { return functionSignatureNode; } Token openPara = getToken(functionSignatureNode.openParenToken()); Token closePara = getToken(functionSignatureNode.closeParenToken()); SeparatedNodeList<ParameterNode> parameters = this.modifySeparatedNodeList(functionSignatureNode.parameters()); ReturnTypeDescriptorNode returnTypeDesc = this.modifyNode(functionSignatureNode.returnTypeDesc().orElse(null)); if (returnTypeDesc != null) { functionSignatureNode = functionSignatureNode.modify() .withReturnTypeDesc(returnTypeDesc).apply(); } return functionSignatureNode.modify() .withOpenParenToken(formatToken(openPara, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closePara, 0, 0, 0, 0)) .withParameters(parameters) .apply(); } @Override public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) { if (!isInLineRange(returnTypeDescriptorNode)) { return returnTypeDescriptorNode; } Token returnsKeyword = getToken(returnTypeDescriptorNode.returnsKeyword()); NodeList<AnnotationNode> annotations = this.modifyNodeList(returnTypeDescriptorNode.annotations()); Node type = this.modifyNode(returnTypeDescriptorNode.type()); return returnTypeDescriptorNode.modify() .withAnnotations(annotations) .withReturnsKeyword(formatToken(returnsKeyword, 1, 1, 0, 0)) .withType(type) .apply(); } @Override public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) { if (!isInLineRange(optionalTypeDescriptorNode)) { return optionalTypeDescriptorNode; } Node typeDescriptor = this.modifyNode(optionalTypeDescriptorNode.typeDescriptor()); Token questionMarkToken = getToken(optionalTypeDescriptorNode.questionMarkToken()); return optionalTypeDescriptorNode.modify() .withTypeDescriptor(typeDescriptor) .withQuestionMarkToken(formatToken(questionMarkToken, 0, 0, 0, 
0)) .apply(); } @Override public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) { if (!isInLineRange(requiredParameterNode)) { return requiredParameterNode; } Token paramName = getToken(requiredParameterNode.paramName().orElse(null)); NodeList<AnnotationNode> annotations = this.modifyNodeList(requiredParameterNode.annotations()); Node typeName = this.modifyNode(requiredParameterNode.typeName()); if (paramName != null) { requiredParameterNode = requiredParameterNode.modify() .withParamName(formatToken(paramName, 1, 0, 0, 0)).apply(); } return requiredParameterNode.modify() .withAnnotations(annotations) .withTypeName(typeName) .apply(); } @Override public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) { if (!isInLineRange(builtinSimpleNameReferenceNode)) { return builtinSimpleNameReferenceNode; } int startCol = getStartColumn(builtinSimpleNameReferenceNode, builtinSimpleNameReferenceNode.kind(), true); Token name = getToken(builtinSimpleNameReferenceNode.name()); return builtinSimpleNameReferenceNode.modify() .withName(formatToken(name, startCol, 0, 0, 0)) .apply(); } @Override public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) { if (!isInLineRange(functionBodyBlockNode)) { return functionBodyBlockNode; } int startColumn = getStartColumn(functionBodyBlockNode, functionBodyBlockNode.kind(), false); Token functionBodyOpenBrace = getToken(functionBodyBlockNode.openBraceToken()); Token functionBodyCloseBrace = getToken(functionBodyBlockNode.closeBraceToken()); NodeList<StatementNode> statements = this.modifyNodeList(functionBodyBlockNode.statements()); NamedWorkerDeclarator namedWorkerDeclarator = this.modifyNode(functionBodyBlockNode.namedWorkerDeclarator().orElse(null)); if (namedWorkerDeclarator != null) { functionBodyBlockNode = functionBodyBlockNode.modify() .withNamedWorkerDeclarator(namedWorkerDeclarator).apply(); } return 
functionBodyBlockNode.modify() .withOpenBraceToken(formatToken(functionBodyOpenBrace, 1, 0, 0, 1)) .withCloseBraceToken(formatToken(functionBodyCloseBrace, startColumn, 0, 0, 1)) .withStatements(statements) .apply(); } @Override public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) { if (!isInLineRange(expressionStatementNode)) { return expressionStatementNode; } ExpressionNode expression = this.modifyNode(expressionStatementNode.expression()); Token semicolonToken = expressionStatementNode.semicolonToken(); return expressionStatementNode.modify() .withExpression(expression) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) { if (!isInLineRange(functionCallExpressionNode)) { return functionCallExpressionNode; } NameReferenceNode functionName = this.modifyNode(functionCallExpressionNode.functionName()); Token functionCallOpenPara = getToken(functionCallExpressionNode.openParenToken()); Token functionCallClosePara = getToken(functionCallExpressionNode.closeParenToken()); SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(functionCallExpressionNode .arguments()); return functionCallExpressionNode.modify() .withFunctionName(functionName) .withOpenParenToken(formatToken(functionCallOpenPara, 0, 0, 0, 0)) .withCloseParenToken(formatToken(functionCallClosePara, 0, 0, 0, 0)) .withArguments(arguments) .apply(); } @Override public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) { if (!isInLineRange(qualifiedNameReferenceNode)) { return qualifiedNameReferenceNode; } int startCol = getStartColumn(qualifiedNameReferenceNode, qualifiedNameReferenceNode.kind(), false); Token modulePrefix = getToken(qualifiedNameReferenceNode.modulePrefix()); Token identifier = getToken(qualifiedNameReferenceNode.identifier()); Token colon = getToken((Token) 
qualifiedNameReferenceNode.colon()); return qualifiedNameReferenceNode.modify() .withModulePrefix(formatToken(modulePrefix, startCol, 0, 0, 0)) .withIdentifier((IdentifierToken) formatToken(identifier, 0, 0, 0, 0)) .withColon(formatToken(colon, 0, 0, 0, 0)) .apply(); } @Override public PositionalArgumentNode transform(PositionalArgumentNode positionalArgumentNode) { if (!isInLineRange(positionalArgumentNode)) { return positionalArgumentNode; } ExpressionNode expression = this.modifyNode(positionalArgumentNode.expression()); return positionalArgumentNode.modify() .withExpression(expression) .apply(); } @Override public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) { if (!isInLineRange(basicLiteralNode)) { return basicLiteralNode; } Token literalToken = getToken(basicLiteralNode.literalToken()); return basicLiteralNode.modify() .withLiteralToken(formatToken(literalToken, 0, 0, 0, 0)) .apply(); } @Override public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) { if (!isInLineRange(serviceDeclarationNode)) { return serviceDeclarationNode; } Token serviceKeyword = getToken(serviceDeclarationNode.serviceKeyword()); IdentifierToken serviceName = (IdentifierToken) getToken(serviceDeclarationNode.serviceName()); Token onKeyword = getToken(serviceDeclarationNode.onKeyword()); MetadataNode metadata = this.modifyNode(serviceDeclarationNode.metadata().orElse(null)); SeparatedNodeList<ExpressionNode> expressions = this.modifySeparatedNodeList(serviceDeclarationNode.expressions()); Node serviceBody = this.modifyNode(serviceDeclarationNode.serviceBody()); if (metadata != null) { serviceDeclarationNode = serviceDeclarationNode.modify() .withMetadata(metadata).apply(); } return serviceDeclarationNode.modify() .withServiceKeyword(formatToken(serviceKeyword, 0, 0, 1, 0)) .withServiceName((IdentifierToken) formatToken(serviceName, 1, 0, 0, 0)) .withOnKeyword(formatToken(onKeyword, 1, 0, 0, 0)) .withExpressions(expressions) 
.withServiceBody(serviceBody) .apply(); } @Override public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) { if (!isInLineRange(serviceBodyNode)) { return serviceBodyNode; } Token openBraceToken = getToken(serviceBodyNode.openBraceToken()); Token closeBraceToken = getToken(serviceBodyNode.closeBraceToken()); NodeList<Node> resources = this.modifyNodeList(serviceBodyNode.resources()); return serviceBodyNode.modify() .withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1)) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 1)) .withResources(resources) .apply(); } @Override public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) { if (!isInLineRange(explicitNewExpressionNode)) { return explicitNewExpressionNode; } Token newKeywordToken = getToken(explicitNewExpressionNode.newKeyword()); TypeDescriptorNode typeDescriptorNode = this.modifyNode(explicitNewExpressionNode.typeDescriptor()); return explicitNewExpressionNode.modify() .withNewKeyword(formatToken(newKeywordToken, 1, 1, 0, 0)) .withParenthesizedArgList(modifyNode(explicitNewExpressionNode.parenthesizedArgList())) .withTypeDescriptor(typeDescriptorNode) .apply(); } @Override public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) { if (!isInLineRange(parenthesizedArgList)) { return parenthesizedArgList; } Token openParenToken = getToken(parenthesizedArgList.openParenToken()); Token closeParenToken = getToken(parenthesizedArgList.closeParenToken()); SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(parenthesizedArgList .arguments()); return parenthesizedArgList.modify() .withArguments(arguments) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) { if (!isInLineRange(variableDeclarationNode)) { return 
variableDeclarationNode; } Token semicolonToken = getToken(variableDeclarationNode.semicolonToken()); Token equalToken = getToken(variableDeclarationNode.equalsToken().orElse(null)); Token finalToken = getToken(variableDeclarationNode.finalKeyword().orElse(null)); ExpressionNode initializer = this.modifyNode(variableDeclarationNode.initializer().orElse(null)); NodeList<AnnotationNode> annotationNodes = this.modifyNodeList(variableDeclarationNode.annotations()); TypedBindingPatternNode typedBindingPatternNode = this.modifyNode( variableDeclarationNode.typedBindingPattern()); if (equalToken != null) { variableDeclarationNode = variableDeclarationNode.modify() .withEqualsToken(formatToken(equalToken, 1, 1, 0, 0)).apply(); } if (finalToken != null) { variableDeclarationNode = variableDeclarationNode.modify() .withFinalKeyword(formatToken(finalToken, 0, 0, 0, 0)).apply(); } if (initializer != null) { variableDeclarationNode = variableDeclarationNode.modify() .withInitializer(initializer).apply(); } return variableDeclarationNode.modify() .withAnnotations(annotationNodes) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .withTypedBindingPattern(typedBindingPatternNode) .apply(); } @Override public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) { if (!isInLineRange(typedBindingPatternNode)) { return typedBindingPatternNode; } BindingPatternNode bindingPatternNode = this.modifyNode(typedBindingPatternNode.bindingPattern()); TypeDescriptorNode typeDescriptorNode = this.modifyNode(typedBindingPatternNode.typeDescriptor()); return typedBindingPatternNode.modify() .withBindingPattern(bindingPatternNode) .withTypeDescriptor(typeDescriptorNode) .apply(); } @Override public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) { if (!isInLineRange(captureBindingPatternNode)) { return captureBindingPatternNode; } Token variableName = getToken(captureBindingPatternNode.variableName()); return 
captureBindingPatternNode.modify() .withVariableName(formatToken(variableName, 1, 0, 0, 0)) .apply(); } @Override public ListBindingPatternNode transform(ListBindingPatternNode listBindingPatternNode) { if (!isInLineRange(listBindingPatternNode)) { return listBindingPatternNode; } SeparatedNodeList<BindingPatternNode> bindingPatternNodes = this.modifySeparatedNodeList( listBindingPatternNode.bindingPatterns()); Token openBracket = getToken(listBindingPatternNode.openBracket()); Token closeBracket = getToken(listBindingPatternNode.closeBracket()); RestBindingPatternNode restBindingPattern = this.modifyNode(listBindingPatternNode.restBindingPattern().orElse(null)); if (restBindingPattern != null) { listBindingPatternNode = listBindingPatternNode.modify() .withRestBindingPattern(restBindingPattern).apply(); } return listBindingPatternNode.modify() .withBindingPatterns(bindingPatternNodes) .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public MappingBindingPatternNode transform(MappingBindingPatternNode mappingBindingPatternNode) { if (!isInLineRange(mappingBindingPatternNode)) { return mappingBindingPatternNode; } Token openBraceToken = getToken(mappingBindingPatternNode.openBrace()); Token closeBraceToken = getToken(mappingBindingPatternNode.closeBrace()); SeparatedNodeList<FieldBindingPatternNode> fieldBindingPatternNodes = this.modifySeparatedNodeList(mappingBindingPatternNode.fieldBindingPatterns()); RestBindingPatternNode restBindingPattern = this.modifyNode(mappingBindingPatternNode.restBindingPattern().orElse(null)); if (restBindingPattern != null) { mappingBindingPatternNode = mappingBindingPatternNode.modify() .withRestBindingPattern(restBindingPattern).apply(); } return mappingBindingPatternNode.modify() .withOpenBrace(formatToken(openBraceToken, 1, 0, 0, 1)) .withCloseBrace(formatToken(closeBraceToken, 0, 0, 1, 0)) .withFieldBindingPatterns(fieldBindingPatternNodes) 
.apply(); } @Override public FieldBindingPatternFullNode transform(FieldBindingPatternFullNode fieldBindingPatternFullNode) { if (!isInLineRange(fieldBindingPatternFullNode)) { return fieldBindingPatternFullNode; } Token colon = getToken(fieldBindingPatternFullNode.colon()); BindingPatternNode bindingPatternNode = this.modifyNode(fieldBindingPatternFullNode.bindingPattern()); SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternFullNode.variableName()); return fieldBindingPatternFullNode.modify() .withBindingPattern(bindingPatternNode) .withColon(formatToken(colon, 0, 0, 0, 0)) .withVariableName(variableName) .apply(); } @Override public FieldBindingPatternVarnameNode transform(FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode) { if (!isInLineRange(fieldBindingPatternVarnameNode)) { return fieldBindingPatternVarnameNode; } SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternVarnameNode.variableName()); return fieldBindingPatternVarnameNode.modify() .withVariableName(variableName) .apply(); } @Override public RestBindingPatternNode transform(RestBindingPatternNode restBindingPatternNode) { if (!isInLineRange(restBindingPatternNode)) { return restBindingPatternNode; } Token ellipsisToken = getToken(restBindingPatternNode.ellipsisToken()); SimpleNameReferenceNode variableName = restBindingPatternNode.variableName(); return restBindingPatternNode.modify() .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0)) .withVariableName(variableName) .apply(); } @Override public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) { if (!isInLineRange(remoteMethodCallActionNode)) { return remoteMethodCallActionNode; } Token openParenToken = getToken(remoteMethodCallActionNode.openParenToken()); Token closeParenToken = getToken(remoteMethodCallActionNode.closeParenToken()); Token rightArrowToken = getToken(remoteMethodCallActionNode.rightArrowToken()); 
SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(remoteMethodCallActionNode .arguments()); ExpressionNode expression = this.modifyNode(remoteMethodCallActionNode.expression()); SimpleNameReferenceNode methodName = this.modifyNode(remoteMethodCallActionNode.methodName()); return remoteMethodCallActionNode.modify() .withArguments(arguments) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .withExpression(expression) .withMethodName(methodName) .withRightArrowToken(formatToken(rightArrowToken, 0, 0, 0, 0)) .apply(); } @Override public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) { if (!isInLineRange(simpleNameReferenceNode)) { return simpleNameReferenceNode; } Token name = getToken(simpleNameReferenceNode.name()); return simpleNameReferenceNode.modify() .withName(formatToken(name, 0, 0, 0, 0)) .apply(); } @Override public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) { if (!isInLineRange(ifElseStatementNode)) { return ifElseStatementNode; } BlockStatementNode ifBody = this.modifyNode(ifElseStatementNode.ifBody()); ExpressionNode condition = this.modifyNode(ifElseStatementNode.condition()); Token ifKeyword = getToken(ifElseStatementNode.ifKeyword()); Node elseBody = this.modifyNode(ifElseStatementNode.elseBody().orElse(null)); int startColumn = 1; if (ifElseStatementNode.parent().kind() != SyntaxKind.ELSE_BLOCK) { startColumn = getStartColumn(ifElseStatementNode, ifElseStatementNode.kind(), true); } if (elseBody != null) { ifElseStatementNode = ifElseStatementNode.modify() .withElseBody(elseBody).apply(); } return ifElseStatementNode.modify() .withIfKeyword(formatToken(ifKeyword, startColumn, 0, 0, 0)) .withIfBody(ifBody) .withCondition(condition) .apply(); } @Override public ElseBlockNode transform(ElseBlockNode elseBlockNode) { if (!isInLineRange(elseBlockNode)) { return elseBlockNode; } Token 
elseKeyword = getToken(elseBlockNode.elseKeyword()); StatementNode elseBody = this.modifyNode(elseBlockNode.elseBody()); return elseBlockNode.modify() .withElseKeyword(formatToken(elseKeyword, 1, 0, 0, 0)) .withElseBody(elseBody) .apply(); } @Override public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) { if (!isInLineRange(bracedExpressionNode)) { return bracedExpressionNode; } Token openParen = getToken(bracedExpressionNode.openParen()); Token closeParen = getToken(bracedExpressionNode.closeParen()); ExpressionNode expression = this.modifyNode(bracedExpressionNode.expression()); return bracedExpressionNode.modify() .withOpenParen(formatToken(openParen, 1, 0, 0, 0)) .withCloseParen(formatToken(closeParen, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public TypeTestExpressionNode transform(TypeTestExpressionNode typeTestExpressionNode) { if (!isInLineRange(typeTestExpressionNode)) { return typeTestExpressionNode; } ExpressionNode expression = this.modifyNode(typeTestExpressionNode.expression()); Node typeDescriptor = this.modifyNode(typeTestExpressionNode.typeDescriptor()); Token isToken = getToken(typeTestExpressionNode.isKeyword()); return typeTestExpressionNode.modify() .withExpression(expression) .withIsKeyword(formatToken(isToken, 1, 1, 0, 0)) .withTypeDescriptor(typeDescriptor) .apply(); } @Override public ErrorTypeDescriptorNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) { if (!isInLineRange(errorTypeDescriptorNode)) { return errorTypeDescriptorNode; } Token errorKeywordToken = getToken(errorTypeDescriptorNode.errorKeywordToken()); ErrorTypeParamsNode errorTypeParamsNode = this.modifyNode(errorTypeDescriptorNode.errorTypeParamsNode().orElse(null)); if (errorTypeParamsNode != null) { errorTypeDescriptorNode = errorTypeDescriptorNode.modify() .withErrorTypeParamsNode(errorTypeParamsNode).apply(); } return errorTypeDescriptorNode.modify() .withErrorKeywordToken(formatToken(errorKeywordToken, 0, 0, 0, 
0)) .apply(); } @Override public ModuleVariableDeclarationNode transform(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { if (!isInLineRange(moduleVariableDeclarationNode)) { return moduleVariableDeclarationNode; } Token equalsToken = getToken(moduleVariableDeclarationNode.equalsToken()); Token semicolonToken = getToken(moduleVariableDeclarationNode.semicolonToken()); Token finalKeyword = getToken(moduleVariableDeclarationNode.finalKeyword().orElse(null)); MetadataNode metadata = this.modifyNode(moduleVariableDeclarationNode.metadata().orElse(null)); ExpressionNode initializer = this.modifyNode(moduleVariableDeclarationNode.initializer()); if (metadata != null) { moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify() .withMetadata(metadata).apply(); } if (finalKeyword != null) { moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify() .withFinalKeyword(formatToken(finalKeyword, 0, 1, 0, 0)).apply(); } return moduleVariableDeclarationNode.modify() .withTypedBindingPattern(this.modifyNode(moduleVariableDeclarationNode.typedBindingPattern())) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withInitializer(initializer) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 2)) .apply(); } @Override public ConstantDeclarationNode transform(ConstantDeclarationNode constantDeclarationNode) { if (!isInLineRange(constantDeclarationNode)) { return constantDeclarationNode; } Token constKeyword = getToken(constantDeclarationNode.constKeyword()); Token variableName = getToken(constantDeclarationNode.variableName()); Token equalsToken = getToken(constantDeclarationNode.equalsToken()); Token semicolonToken = getToken(constantDeclarationNode.semicolonToken()); Token visibilityQualifier = getToken(constantDeclarationNode.visibilityQualifier().orElse(null)); Node initializer = this.modifyNode(constantDeclarationNode.initializer()); MetadataNode metadata = this.modifyNode(constantDeclarationNode.metadata().orElse(null)); 
TypeDescriptorNode typeDescriptorNode = this.modifyNode(constantDeclarationNode.typeDescriptor().orElse(null)); if (metadata != null) { constantDeclarationNode = constantDeclarationNode.modify() .withMetadata(metadata).apply(); } return constantDeclarationNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0)) .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0)) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withInitializer(initializer) .withSemicolonToken(formatToken(semicolonToken, 1, 1, 0, 1)) .withTypeDescriptor(typeDescriptorNode) .withVariableName(variableName) .apply(); } @Override public MetadataNode transform(MetadataNode metadataNode) { if (!isInLineRange(metadataNode)) { return metadataNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(metadataNode.annotations()); Node documentationString = metadataNode.documentationString().orElse(null); if (documentationString != null) { metadataNode = metadataNode.modify() .withDocumentationString(this.modifyNode(documentationString)).apply(); } return metadataNode.modify() .withAnnotations(annotations) .apply(); } @Override public BlockStatementNode transform(BlockStatementNode blockStatementNode) { if (!isInLineRange(blockStatementNode)) { return blockStatementNode; } int startColumn = getStartColumn(blockStatementNode, blockStatementNode.kind(), false); Token openBraceToken = getToken(blockStatementNode.openBraceToken()); Token closeBraceToken = getToken(blockStatementNode.closeBraceToken()); NodeList<StatementNode> statements = this.modifyNodeList(blockStatementNode.statements()); int trailingNewLines = 1; if (blockStatementNode.parent() != null && blockStatementNode.parent().kind() == SyntaxKind.IF_ELSE_STATEMENT) { IfElseStatementNode ifElseStatementNode = (IfElseStatementNode) blockStatementNode.parent(); if (ifElseStatementNode.elseBody().isPresent()) { trailingNewLines = 0; } } return blockStatementNode.modify() 
.withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1)) .withCloseBraceToken(formatToken(closeBraceToken, startColumn, 0, 0, trailingNewLines)) .withStatements(statements) .apply(); } @Override public MappingConstructorExpressionNode transform( MappingConstructorExpressionNode mappingConstructorExpressionNode) { if (!isInLineRange(mappingConstructorExpressionNode)) { return mappingConstructorExpressionNode; } int startColumn = getStartColumn(mappingConstructorExpressionNode, mappingConstructorExpressionNode.kind(), false); Token openBrace = getToken(mappingConstructorExpressionNode.openBrace()); Token closeBrace = getToken(mappingConstructorExpressionNode.closeBrace()); SeparatedNodeList<MappingFieldNode> fields = this.modifySeparatedNodeList( mappingConstructorExpressionNode.fields()); return mappingConstructorExpressionNode.modify() .withOpenBrace(formatToken(openBrace, 0, 0, 0, 1)) .withCloseBrace(formatToken(closeBrace, startColumn, 0, 1, 0)) .withFields(fields) .apply(); } @Override public ListenerDeclarationNode transform(ListenerDeclarationNode listenerDeclarationNode) { if (!isInLineRange(listenerDeclarationNode)) { return listenerDeclarationNode; } Token equalsToken = getToken(listenerDeclarationNode.equalsToken()); Token variableName = getToken(listenerDeclarationNode.variableName()); Token semicolonToken = getToken(listenerDeclarationNode.semicolonToken()); Token listenerKeyword = getToken(listenerDeclarationNode.listenerKeyword()); Token visibilityQualifier = getToken(listenerDeclarationNode.visibilityQualifier().orElse(null)); Node initializer = this.modifyNode(listenerDeclarationNode.initializer()); MetadataNode metadata = this.modifyNode(listenerDeclarationNode.metadata().orElse(null)); Node typeDescriptor = this.modifyNode(listenerDeclarationNode.typeDescriptor()); if (visibilityQualifier != null) { listenerDeclarationNode = listenerDeclarationNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 0, 0, 0)).apply(); } if 
(metadata != null) { listenerDeclarationNode = listenerDeclarationNode.modify() .withMetadata(metadata).apply(); } return listenerDeclarationNode.modify() .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withInitializer(initializer) .withListenerKeyword(formatToken(listenerKeyword, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .withTypeDescriptor(typeDescriptor) .withVariableName(formatToken(variableName, 0, 0, 0, 0)) .apply(); } @Override public SpecificFieldNode transform(SpecificFieldNode specificFieldNode) { if (!isInLineRange(specificFieldNode)) { return specificFieldNode; } int startColumn = getStartColumn(specificFieldNode, specificFieldNode.kind(), true); Token fieldName = getToken((Token) specificFieldNode.fieldName()); Token readOnlyKeyword = specificFieldNode.readonlyKeyword().orElse(null); Token colon = getToken(specificFieldNode.colon().orElse(null)); ExpressionNode expressionNode = this.modifyNode(specificFieldNode.valueExpr().orElse(null)); if (readOnlyKeyword != null) { specificFieldNode = specificFieldNode.modify() .withReadonlyKeyword(formatToken(readOnlyKeyword, 0, 0, 0, 0)).apply(); } return specificFieldNode.modify() .withFieldName(formatToken(fieldName, startColumn, 0, 0, 0)) .withColon(formatToken(colon, 0, 1, 0, 0)) .withValueExpr(expressionNode) .apply(); } @Override public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) { if (!isInLineRange(binaryExpressionNode)) { return binaryExpressionNode; } Node lhsExpr = this.modifyNode(binaryExpressionNode.lhsExpr()); Node rhsExpr = this.modifyNode(binaryExpressionNode.rhsExpr()); Token operator = getToken(binaryExpressionNode.operator()); return binaryExpressionNode.modify() .withLhsExpr(lhsExpr) .withRhsExpr(rhsExpr) .withOperator(formatToken(operator, 1, 1, 0, 0)) .apply(); } @Override public ArrayTypeDescriptorNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) { if (!isInLineRange(arrayTypeDescriptorNode)) { return 
arrayTypeDescriptorNode; } Node arrayLength = arrayTypeDescriptorNode.arrayLength().orElse(null); Token openBracket = getToken(arrayTypeDescriptorNode.openBracket()); Token closeBracket = getToken(arrayTypeDescriptorNode.closeBracket()); TypeDescriptorNode memberTypeDesc = this.modifyNode(arrayTypeDescriptorNode.memberTypeDesc()); if (arrayLength != null) { arrayTypeDescriptorNode = arrayTypeDescriptorNode.modify() .withArrayLength(this.modifyNode(arrayLength)).apply(); } return arrayTypeDescriptorNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .withMemberTypeDesc(memberTypeDesc) .apply(); } @Override public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) { if (!isInLineRange(assignmentStatementNode)) { return assignmentStatementNode; } Node varRef = this.modifyNode(assignmentStatementNode.varRef()); ExpressionNode expression = this.modifyNode(assignmentStatementNode.expression()); Token equalsToken = getToken(assignmentStatementNode.equalsToken()); Token semicolonToken = getToken(assignmentStatementNode.semicolonToken()); return assignmentStatementNode.modify() .withVarRef(varRef) .withExpression(expression) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .apply(); } @Override public IndexedExpressionNode transform(IndexedExpressionNode indexedExpressionNode) { if (!isInLineRange(indexedExpressionNode)) { return indexedExpressionNode; } SeparatedNodeList<ExpressionNode> keyExpression = this.modifySeparatedNodeList( indexedExpressionNode.keyExpression()); ExpressionNode containerExpression = this.modifyNode(indexedExpressionNode.containerExpression()); Token openBracket = getToken(indexedExpressionNode.openBracket()); Token closeBracket = getToken(indexedExpressionNode.closeBracket()); return indexedExpressionNode.modify() .withKeyExpression(keyExpression) 
.withContainerExpression(containerExpression) .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) { if (!isInLineRange(checkExpressionNode)) { return checkExpressionNode; } int startColumn = getStartColumn(checkExpressionNode, checkExpressionNode.kind(), false); Token checkKeyword = getToken(checkExpressionNode.checkKeyword()); ExpressionNode expressionNode = this.modifyNode(checkExpressionNode.expression()); return checkExpressionNode.modify() .withCheckKeyword(formatToken(checkKeyword, startColumn, 1, 0, 0)) .withExpression(expressionNode) .apply(); } @Override public WhileStatementNode transform(WhileStatementNode whileStatementNode) { if (!isInLineRange(whileStatementNode)) { return whileStatementNode; } int startColumn = getStartColumn(whileStatementNode, whileStatementNode.kind(), true); Token whileKeyword = getToken(whileStatementNode.whileKeyword()); ExpressionNode condition = this.modifyNode(whileStatementNode.condition()); BlockStatementNode whileBody = this.modifyNode(whileStatementNode.whileBody()); return whileStatementNode.modify() .withWhileKeyword(formatToken(whileKeyword, startColumn, 0, 0, 0)) .withCondition(condition) .withWhileBody(whileBody) .apply(); } @Override public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) { if (!isInLineRange(returnStatementNode)) { return returnStatementNode; } int startColumn = getStartColumn(returnStatementNode, returnStatementNode.kind(), true); Token returnKeyword = getToken(returnStatementNode.returnKeyword()); ExpressionNode expressionNode = returnStatementNode.expression().orElse(null); Token semicolonToken = getToken(returnStatementNode.semicolonToken()); if (expressionNode != null) { returnStatementNode = returnStatementNode.modify() .withExpression(this.modifyNode(expressionNode)).apply(); } return 
returnStatementNode.modify() .withReturnKeyword(formatToken(returnKeyword, startColumn, 1, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .apply(); } @Override public MethodCallExpressionNode transform(MethodCallExpressionNode methodCallExpressionNode) { if (!isInLineRange(methodCallExpressionNode)) { return methodCallExpressionNode; } SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(methodCallExpressionNode .arguments()); Token openParenToken = getToken(methodCallExpressionNode.openParenToken()); Token closeParenToken = getToken(methodCallExpressionNode.closeParenToken()); Token dotToken = getToken(methodCallExpressionNode.dotToken()); ExpressionNode expression = this.modifyNode(methodCallExpressionNode.expression()); NameReferenceNode methodName = this.modifyNode(methodCallExpressionNode.methodName()); return methodCallExpressionNode.modify() .withArguments(arguments) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .withDotToken(formatToken(dotToken, 0, 0, 0, 0)) .withExpression(expression) .withMethodName(methodName) .apply(); } @Override public NilLiteralNode transform(NilLiteralNode nilLiteralNode) { Token openParenToken = getToken(nilLiteralNode.openParenToken()); Token closeParenToken = getToken(nilLiteralNode.closeParenToken()); return nilLiteralNode.modify() .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public NilTypeDescriptorNode transform(NilTypeDescriptorNode nilTypeDescriptorNode) { Token openParenToken = getToken(nilTypeDescriptorNode.openParenToken()); Token closeParenToken = getToken(nilTypeDescriptorNode.closeParenToken()); return nilTypeDescriptorNode.modify() .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public 
UnionTypeDescriptorNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) { TypeDescriptorNode leftTypeDesc = this.modifyNode(unionTypeDescriptorNode.leftTypeDesc()); Token pipeToken = getToken(unionTypeDescriptorNode.pipeToken()); TypeDescriptorNode rightTypeDesc = this.modifyNode(unionTypeDescriptorNode.rightTypeDesc()); return unionTypeDescriptorNode.modify() .withLeftTypeDesc(leftTypeDesc) .withPipeToken(pipeToken) .withRightTypeDesc(rightTypeDesc) .apply(); } @Override @Override public ModuleXMLNamespaceDeclarationNode transform( ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) { Token xmlnsKeyword = getToken(moduleXMLNamespaceDeclarationNode.xmlnsKeyword()); ExpressionNode namespaceuri = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespaceuri()); Token asKeyword = getToken(moduleXMLNamespaceDeclarationNode.asKeyword()); IdentifierToken namespacePrefix = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespacePrefix()); Token semicolonToken = getToken(moduleXMLNamespaceDeclarationNode.semicolonToken()); return moduleXMLNamespaceDeclarationNode.modify() .withNamespacePrefix(namespacePrefix) .withNamespaceuri(namespaceuri) .withXmlnsKeyword(formatToken(xmlnsKeyword, 0, 0, 0, 0)) .withAsKeyword(formatToken(asKeyword, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public XmlTypeDescriptorNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) { int startColumn = getStartColumn(xmlTypeDescriptorNode, xmlTypeDescriptorNode.kind(), true); Token xmlKeywordToken = getToken(xmlTypeDescriptorNode.xmlKeywordToken()); TypeParameterNode xmlTypeParamsNode = this.modifyNode(xmlTypeDescriptorNode.xmlTypeParamsNode().orElse(null)); if (xmlTypeParamsNode != null) { xmlTypeDescriptorNode = xmlTypeDescriptorNode.modify() .withXmlTypeParamsNode(xmlTypeParamsNode).apply(); } return xmlTypeDescriptorNode.modify() .withXmlKeywordToken(formatToken(xmlKeywordToken, startColumn, 0, 0, 0)) 
.apply(); } @Override public XMLElementNode transform(XMLElementNode xMLElementNode) { XMLStartTagNode startTag = this.modifyNode(xMLElementNode.startTag()); NodeList<XMLItemNode> content = modifyNodeList(xMLElementNode.content()); XMLEndTagNode endTag = this.modifyNode(xMLElementNode.endTag()); return xMLElementNode.modify() .withStartTag(startTag) .withEndTag(endTag) .withContent(content) .apply(); } @Override public XMLStartTagNode transform(XMLStartTagNode xMLStartTagNode) { Token ltToken = getToken(xMLStartTagNode.ltToken()); XMLNameNode name = this.modifyNode(xMLStartTagNode.name()); NodeList<XMLAttributeNode> attributes = modifyNodeList(xMLStartTagNode.attributes()); Token getToken = getToken(xMLStartTagNode.getToken()); return xMLStartTagNode.modify() .withName(name) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withAttributes(attributes) .withGetToken(formatToken(getToken, 0, 0, 0, 0)) .apply(); } @Override public XMLEndTagNode transform(XMLEndTagNode xMLEndTagNode) { Token ltToken = getToken(xMLEndTagNode.ltToken()); Token slashToken = getToken(xMLEndTagNode.slashToken()); XMLNameNode name = this.modifyNode(xMLEndTagNode.name()); Token getToken = getToken(xMLEndTagNode.getToken()); return xMLEndTagNode.modify() .withName(name) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withSlashToken(formatToken(slashToken, 0, 0, 0, 0)) .withGetToken(formatToken(getToken, 0, 0, 0, 0)) .apply(); } @Override public XMLSimpleNameNode transform(XMLSimpleNameNode xMLSimpleNameNode) { Token name = getToken(xMLSimpleNameNode.name()); if (xMLSimpleNameNode.parent().kind() == SyntaxKind.XML_PI && ((XMLProcessingInstruction) xMLSimpleNameNode.parent()).data() != null) { return xMLSimpleNameNode.modify() .withName(formatToken(name, 0, 1, 0, 0)) .apply(); } return xMLSimpleNameNode.modify() .withName(formatToken(name, 0, 0, 0, 0)) .apply(); } @Override public XMLQualifiedNameNode transform(XMLQualifiedNameNode xMLQualifiedNameNode) { XMLSimpleNameNode prefix = 
this.modifyNode(xMLQualifiedNameNode.prefix()); Token colon = getToken(xMLQualifiedNameNode.colon()); XMLSimpleNameNode name = this.modifyNode(xMLQualifiedNameNode.name()); return xMLQualifiedNameNode.modify() .withPrefix(prefix) .withName(name) .withColon(formatToken(colon, 0, 0, 0, 0)) .apply(); } @Override public XMLEmptyElementNode transform(XMLEmptyElementNode xMLEmptyElementNode) { Token ltToken = getToken(xMLEmptyElementNode.ltToken()); XMLNameNode name = this.modifyNode(xMLEmptyElementNode.name()); NodeList<XMLAttributeNode> attributes = this.modifyNodeList(xMLEmptyElementNode.attributes()); Token slashToken = getToken(xMLEmptyElementNode.slashToken()); Token getToken = getToken(xMLEmptyElementNode.getToken()); return xMLEmptyElementNode.modify() .withName(name) .withAttributes(attributes) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withSlashToken(formatToken(slashToken, 0, 0, 0, 0)) .withGetToken(formatToken(getToken, 0, 0, 0, 0)) .apply(); } @Override public XMLTextNode transform(XMLTextNode xMLTextNode) { Token content = getToken(xMLTextNode.content()); return xMLTextNode.modify() .withContent(formatToken(content, 0, 0, 0, 0)) .apply(); } @Override public XMLAttributeNode transform(XMLAttributeNode xMLAttributeNode) { XMLNameNode attributeName = this.modifyNode(xMLAttributeNode.attributeName()); Token equalToken = getToken(xMLAttributeNode.equalToken()); XMLAttributeValue value = this.modifyNode(xMLAttributeNode.value()); return xMLAttributeNode.modify() .withValue(value) .withAttributeName(attributeName) .withEqualToken(formatToken(equalToken, 0, 0, 0, 0)) .apply(); } @Override public XMLAttributeValue transform(XMLAttributeValue xMLAttributeValue) { Token startQuote = getToken(xMLAttributeValue.startQuote()); NodeList<Node> value = this.modifyNodeList(xMLAttributeValue.value()); Token endQuote = getToken(xMLAttributeValue.endQuote()); return xMLAttributeValue.modify() .withStartQuote(formatToken(startQuote, 0, 0, 0, 0)) .withValue(value) 
.withEndQuote(formatToken(endQuote, 0, 0, 0, 0)) .apply(); } @Override public XMLComment transform(XMLComment xMLComment) { Token commentStart = getToken(xMLComment.commentStart()); NodeList<Node> content = this.modifyNodeList(xMLComment.content()); Token commentEnd = getToken(xMLComment.commentEnd()); return xMLComment.modify() .withCommentStart(formatToken(commentStart, 0, 0, 0, 0)) .withContent(content) .withCommentEnd(formatToken(commentEnd, 0, 0, 0, 0)) .apply(); } @Override public XMLProcessingInstruction transform(XMLProcessingInstruction xMLProcessingInstruction) { Token piStart = getToken(xMLProcessingInstruction.piStart()); XMLNameNode target = this.modifyNode(xMLProcessingInstruction.target()); NodeList<Node> data = this.modifyNodeList(xMLProcessingInstruction.data()); Token piEnd = getToken(xMLProcessingInstruction.piEnd()); return xMLProcessingInstruction.modify() .withTarget(target) .withPiStart(formatToken(piStart, 0, 0, 0, 0)) .withData(data) .withPiEnd(formatToken(piEnd, 0, 0, 0, 0)) .apply(); } @Override public XMLFilterExpressionNode transform(XMLFilterExpressionNode xMLFilterExpressionNode) { ExpressionNode expression = this.modifyNode(xMLFilterExpressionNode.expression()); XMLNamePatternChainingNode xmlPatternChain = this.modifyNode(xMLFilterExpressionNode.xmlPatternChain()); return xMLFilterExpressionNode.modify() .withExpression(expression) .withXmlPatternChain(xmlPatternChain) .apply(); } @Override public XMLStepExpressionNode transform(XMLStepExpressionNode xMLStepExpressionNode) { ExpressionNode expression = this.modifyNode(xMLStepExpressionNode.expression()); Node xmlStepStart = this.modifyNode(xMLStepExpressionNode.xmlStepStart()); return xMLStepExpressionNode.modify() .withExpression(expression) .withXmlStepStart(xmlStepStart) .apply(); } @Override public XMLNamePatternChainingNode transform(XMLNamePatternChainingNode xMLNamePatternChainingNode) { Token startToken = getToken(xMLNamePatternChainingNode.startToken()); 
SeparatedNodeList<Node> xmlNamePattern = modifySeparatedNodeList(xMLNamePatternChainingNode.xmlNamePattern()); Token gtToken = getToken(xMLNamePatternChainingNode.gtToken()); return xMLNamePatternChainingNode.modify() .withStartToken(formatToken(startToken, 0, 0, 0, 0)) .withXmlNamePattern(xmlNamePattern) .withGtToken(formatToken(gtToken, 0, 0, 0, 0)) .apply(); } @Override public XMLAtomicNamePatternNode transform(XMLAtomicNamePatternNode xMLAtomicNamePatternNode) { Token prefix = getToken(xMLAtomicNamePatternNode.prefix()); Token colon = getToken(xMLAtomicNamePatternNode.colon()); Token name = getToken(xMLAtomicNamePatternNode.name()); return xMLAtomicNamePatternNode.modify() .withPrefix(formatToken(prefix, 0, 0, 0, 0)) .withColon(formatToken(colon, 0, 0, 0, 0)) .withName(formatToken(name, 0, 0, 0, 0)) .apply(); } @Override public TemplateExpressionNode transform(TemplateExpressionNode templateExpressionNode) { Token type = getToken(templateExpressionNode.type().orElse(null)); Token startBacktick = getToken(templateExpressionNode.startBacktick()); NodeList<TemplateMemberNode> content = modifyNodeList(templateExpressionNode.content()); Token endBacktick = getToken(templateExpressionNode.endBacktick()); return templateExpressionNode.modify() .withStartBacktick(formatToken(startBacktick, 1, 0, 0, 0)) .withContent(content) .withType(formatToken(type, 0, 0, 0, 0)) .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0)) .apply(); } @Override public ByteArrayLiteralNode transform(ByteArrayLiteralNode byteArrayLiteralNode) { Token type = getToken(byteArrayLiteralNode.type()); Token startBacktick = getToken(byteArrayLiteralNode.startBacktick()); Token content = getToken(byteArrayLiteralNode.content().orElse(null)); Token endBacktick = getToken(byteArrayLiteralNode.endBacktick()); if (content != null) { byteArrayLiteralNode = byteArrayLiteralNode.modify() .withContent(formatToken(content, 0, 0, 0, 0)).apply(); } return byteArrayLiteralNode.modify() 
.withType(formatToken(type, 0, 0, 0, 0)) .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0)) .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0)) .apply(); } @Override public ListConstructorExpressionNode transform(ListConstructorExpressionNode listConstructorExpressionNode) { Token openBracket = getToken(listConstructorExpressionNode.openBracket()); SeparatedNodeList<Node> expressions = this.modifySeparatedNodeList(listConstructorExpressionNode.expressions()); Token closeBracket = getToken(listConstructorExpressionNode.closeBracket()); return listConstructorExpressionNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withExpressions(expressions) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public TypeReferenceNode transform(TypeReferenceNode typeReferenceNode) { Token asteriskToken = getToken(typeReferenceNode.asteriskToken()); Node typeName = this.modifyNode(typeReferenceNode.typeName()); Token semicolonToken = getToken(typeReferenceNode.semicolonToken()); return typeReferenceNode.modify() .withTypeName(typeName) .withAsteriskToken(formatToken(asteriskToken, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public TupleTypeDescriptorNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) { int startCol = getStartColumn(tupleTypeDescriptorNode, tupleTypeDescriptorNode.kind(), true); Token openBracketToken = getToken(tupleTypeDescriptorNode.openBracketToken()); SeparatedNodeList<Node> memberTypeDesc = this.modifySeparatedNodeList(tupleTypeDescriptorNode.memberTypeDesc()); Token closeBracketToken = getToken(tupleTypeDescriptorNode.closeBracketToken()); return tupleTypeDescriptorNode.modify() .withOpenBracketToken(formatToken(openBracketToken, startCol, 0, 0, 0)) .withMemberTypeDesc(memberTypeDesc) .withCloseBracketToken(formatToken(closeBracketToken, 0, 0, 0, 0)) .apply(); } @Override public MappingMatchPatternNode transform(MappingMatchPatternNode 
mappingMatchPatternNode) { Token openBraceToken = getToken(mappingMatchPatternNode.openBraceToken()); SeparatedNodeList<FieldMatchPatternNode> fieldMatchPatterns = this.modifySeparatedNodeList(mappingMatchPatternNode.fieldMatchPatterns()); RestMatchPatternNode restMatchPattern = this.modifyNode(mappingMatchPatternNode.restMatchPattern().orElse(null)); Token closeBraceToken = getToken(mappingMatchPatternNode.closeBraceToken()); if (restMatchPattern != null) { mappingMatchPatternNode = mappingMatchPatternNode.modify() .withRestMatchPattern(restMatchPattern).apply(); } return mappingMatchPatternNode.modify() .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0)) .withFieldMatchPatterns(fieldMatchPatterns) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0)) .apply(); } @Override public ParameterizedTypeDescriptorNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescriptorNode) { int startCol = getStartColumn(parameterizedTypeDescriptorNode, parameterizedTypeDescriptorNode.kind(), true); Token parameterizedType = getToken(parameterizedTypeDescriptorNode.parameterizedType()); TypeParameterNode typeParameter = this.modifyNode(parameterizedTypeDescriptorNode.typeParameter()); return parameterizedTypeDescriptorNode.modify() .withParameterizedType(formatToken(parameterizedType, startCol, 0, 0, 0)) .withTypeParameter(typeParameter) .apply(); } @Override public TypeParameterNode transform(TypeParameterNode typeParameterNode) { Token ltToken = getToken(typeParameterNode.ltToken()); TypeDescriptorNode typeNode = this.modifyNode(typeParameterNode.typeNode()); Token gtToken = getToken(typeParameterNode.gtToken()); return typeParameterNode.modify() .withTypeNode(typeNode) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withGtToken(formatToken(gtToken, 0, 0, 0, 0)) .apply(); } @Override public StartActionNode transform(StartActionNode startActionNode) { if (!isInLineRange(startActionNode)) { return startActionNode; } NodeList<AnnotationNode> 
annotations = this.modifyNodeList(startActionNode.annotations()); Token startKeyword = getToken(startActionNode.startKeyword()); ExpressionNode expression = this.modifyNode(startActionNode.expression()); return startActionNode.modify() .withAnnotations(annotations) .withStartKeyword(formatToken(startKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public FlushActionNode transform(FlushActionNode flushActionNode) { if (!isInLineRange(flushActionNode)) { return flushActionNode; } Token flushKeyword = getToken(flushActionNode.flushKeyword()); NameReferenceNode peerWorker = this.modifyNode(flushActionNode.peerWorker()); return flushActionNode.modify() .withFlushKeyword(formatToken(flushKeyword, 0, 1, 0, 0)) .withPeerWorker(peerWorker) .apply(); } @Override public NamedWorkerDeclarationNode transform(NamedWorkerDeclarationNode namedWorkerDeclarationNode) { if (!isInLineRange(namedWorkerDeclarationNode)) { return namedWorkerDeclarationNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(namedWorkerDeclarationNode.annotations()); Token workerKeyword = getToken(namedWorkerDeclarationNode.workerKeyword()); IdentifierToken workerName = this.modifyNode(namedWorkerDeclarationNode.workerName()); Node returnTypeDesc = this.modifyNode(namedWorkerDeclarationNode.returnTypeDesc().orElse(null)); BlockStatementNode workerBody = this.modifyNode(namedWorkerDeclarationNode.workerBody()); if (returnTypeDesc != null) { namedWorkerDeclarationNode = namedWorkerDeclarationNode.modify() .withReturnTypeDesc(returnTypeDesc).apply(); } return namedWorkerDeclarationNode.modify() .withAnnotations(annotations) .withWorkerKeyword(formatToken(workerKeyword, 0, 0, 0, 0)) .withWorkerName(workerName) .withWorkerBody(workerBody) .apply(); } @Override public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) { if (!isInLineRange(typeDefinitionNode)) { return typeDefinitionNode; } MetadataNode metadata = 
this.modifyNode(typeDefinitionNode.metadata().orElse(null)); Token visibilityQualifier = getToken(typeDefinitionNode.visibilityQualifier().orElse(null)); Token typeKeyword = getToken(typeDefinitionNode.typeKeyword()); Token typeName = getToken(typeDefinitionNode.typeName()); Node typeDescriptor = this.modifyNode(typeDefinitionNode.typeDescriptor()); Token semicolonToken = this.modifyToken(typeDefinitionNode.semicolonToken()); if (metadata != null) { typeDefinitionNode = typeDefinitionNode.modify() .withMetadata(metadata).apply(); } if (visibilityQualifier != null) { typeDefinitionNode = typeDefinitionNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0)).apply(); } return typeDefinitionNode.modify() .withTypeKeyword(formatToken(typeKeyword, 1, 1, 0, 0)) .withTypeName(formatToken(typeName, 1, 1, 0, 0)) .withTypeDescriptor(typeDescriptor) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) { if (!isInLineRange(compoundAssignmentStatementNode)) { return compoundAssignmentStatementNode; } ExpressionNode lhsExpression = this.modifyNode(compoundAssignmentStatementNode.lhsExpression()); Token binaryOperator = getToken(compoundAssignmentStatementNode.binaryOperator()); Token equalsToken = getToken(compoundAssignmentStatementNode.equalsToken()); ExpressionNode rhsExpression = this.modifyNode(compoundAssignmentStatementNode.rhsExpression()); Token semicolonToken = getToken(compoundAssignmentStatementNode.semicolonToken()); return compoundAssignmentStatementNode.modify() .withLhsExpression(lhsExpression) .withBinaryOperator(formatToken(binaryOperator, 1, 1, 0, 0)) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withRhsExpression(rhsExpression) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public BreakStatementNode transform(BreakStatementNode 
breakStatementNode) { if (!isInLineRange(breakStatementNode)) { return breakStatementNode; } Token breakToken = getToken(breakStatementNode.breakToken()); Token semicolonToken = getToken(breakStatementNode.semicolonToken()); return breakStatementNode.modify() .withBreakToken(formatToken(breakToken, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public ContinueStatementNode transform(ContinueStatementNode continueStatementNode) { if (!isInLineRange(continueStatementNode)) { return continueStatementNode; } Token continueToken = getToken(continueStatementNode.continueToken()); Token semicolonToken = getToken(continueStatementNode.semicolonToken()); return continueStatementNode.modify() .withContinueToken(formatToken(continueToken, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public ExternalFunctionBodyNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) { if (!isInLineRange(externalFunctionBodyNode)) { return externalFunctionBodyNode; } Token equalsToken = getToken(externalFunctionBodyNode.equalsToken()); NodeList<AnnotationNode> annotations = this.modifyNodeList(externalFunctionBodyNode.annotations()); Token externalKeyword = getToken(externalFunctionBodyNode.externalKeyword()); Token semicolonToken = getToken(externalFunctionBodyNode.semicolonToken()); return externalFunctionBodyNode.modify() .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withAnnotations(annotations) .withExternalKeyword(formatToken(externalKeyword, 1, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 1, 0, 0, 0)) .apply(); } @Override public PanicStatementNode transform(PanicStatementNode panicStatementNode) { if (!isInLineRange(panicStatementNode)) { return panicStatementNode; } Token panicKeyword = getToken(panicStatementNode.panicKeyword()); ExpressionNode expression = this.modifyNode(panicStatementNode.expression()); Token semicolonToken = 
getToken(panicStatementNode.semicolonToken()); return panicStatementNode.modify() .withPanicKeyword(formatToken(panicKeyword, 1, 1, 0, 0)) .withExpression(expression) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public LocalTypeDefinitionStatementNode transform( LocalTypeDefinitionStatementNode localTypeDefinitionStatementNode) { if (!isInLineRange(localTypeDefinitionStatementNode)) { return localTypeDefinitionStatementNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(localTypeDefinitionStatementNode.annotations()); Token typeKeyword = getToken(localTypeDefinitionStatementNode.typeKeyword()); Node typeName = this.modifyNode(localTypeDefinitionStatementNode.typeName()); Node typeDescriptor = this.modifyNode(localTypeDefinitionStatementNode.typeDescriptor()); Token semicolonToken = getToken(localTypeDefinitionStatementNode.semicolonToken()); return localTypeDefinitionStatementNode.modify() .withAnnotations(annotations) .withTypeKeyword(formatToken(typeKeyword, 0, 1, 0, 0)) .withTypeName(typeName) .withTypeDescriptor(typeDescriptor) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public LockStatementNode transform(LockStatementNode lockStatementNode) { if (!isInLineRange(lockStatementNode)) { return lockStatementNode; } Token lockKeyword = getToken(lockStatementNode.lockKeyword()); StatementNode blockStatement = this.modifyNode(lockStatementNode.blockStatement()); return lockStatementNode.modify() .withLockKeyword(formatToken(lockKeyword, 0, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public ForkStatementNode transform(ForkStatementNode forkStatementNode) { if (!isInLineRange(forkStatementNode)) { return forkStatementNode; } Token forkKeyword = getToken(forkStatementNode.forkKeyword()); Token openBraceToken = getToken(forkStatementNode.openBraceToken()); NodeList<NamedWorkerDeclarationNode> namedWorkerDeclarations = 
this.modifyNodeList(forkStatementNode.namedWorkerDeclarations()); Token closeBraceToken = getToken(forkStatementNode.closeBraceToken()); return forkStatementNode.modify() .withForkKeyword(formatToken(forkKeyword, 1, 1, 0, 0)) .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0)) .withNamedWorkerDeclarations(namedWorkerDeclarations) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0)) .apply(); } @Override public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) { if (!isInLineRange(forEachStatementNode)) { return forEachStatementNode; } Token forEachKeyword = getToken(forEachStatementNode.forEachKeyword()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(forEachStatementNode.typedBindingPattern()); Token inKeyword = getToken(forEachStatementNode.inKeyword()); Node actionOrExpressionNode = this.modifyNode(forEachStatementNode.actionOrExpressionNode()); StatementNode blockStatement = this.modifyNode(forEachStatementNode.blockStatement()); return forEachStatementNode.modify() .withForEachKeyword(formatToken(forEachKeyword, 0, 1, 0, 0)) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0)) .withActionOrExpressionNode(actionOrExpressionNode) .withBlockStatement(blockStatement) .apply(); } @Override public FailExpressionNode transform(FailExpressionNode failExpressionNode) { if (!isInLineRange(failExpressionNode)) { return failExpressionNode; } Token failKeyword = getToken(failExpressionNode.failKeyword()); ExpressionNode expression = this.modifyNode(failExpressionNode.expression()); return failExpressionNode.modify() .withFailKeyword(formatToken(failKeyword, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public FieldAccessExpressionNode transform(FieldAccessExpressionNode fieldAccessExpressionNode) { if (!isInLineRange(fieldAccessExpressionNode)) { return fieldAccessExpressionNode; } ExpressionNode expression = 
this.modifyNode(fieldAccessExpressionNode.expression()); Token dotToken = getToken(fieldAccessExpressionNode.dotToken()); NameReferenceNode fieldName = this.modifyNode(fieldAccessExpressionNode.fieldName()); return fieldAccessExpressionNode.modify() .withExpression(expression) .withDotToken(formatToken(dotToken, 0, 0, 0, 0)) .withFieldName(fieldName) .apply(); } @Override public TypeofExpressionNode transform(TypeofExpressionNode typeofExpressionNode) { if (!isInLineRange(typeofExpressionNode)) { return typeofExpressionNode; } Token typeofKeyword = getToken(typeofExpressionNode.typeofKeyword()); ExpressionNode expression = this.modifyNode(typeofExpressionNode.expression()); return typeofExpressionNode.modify() .withTypeofKeyword(formatToken(typeofKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public UnaryExpressionNode transform(UnaryExpressionNode unaryExpressionNode) { if (!isInLineRange(unaryExpressionNode)) { return unaryExpressionNode; } Token unaryOperator = getToken(unaryExpressionNode.unaryOperator()); ExpressionNode expression = this.modifyNode(unaryExpressionNode.expression()); return unaryExpressionNode.modify() .withUnaryOperator(formatToken(unaryOperator, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public ComputedNameFieldNode transform(ComputedNameFieldNode computedNameFieldNode) { if (!isInLineRange(computedNameFieldNode)) { return computedNameFieldNode; } Token openBracket = getToken(computedNameFieldNode.openBracket()); ExpressionNode fieldNameExpr = this.modifyNode(computedNameFieldNode.fieldNameExpr()); Token closeBracket = getToken(computedNameFieldNode.closeBracket()); Token colonToken = getToken(computedNameFieldNode.colonToken()); ExpressionNode valueExpr = this.modifyNode(computedNameFieldNode.valueExpr()); return computedNameFieldNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withFieldNameExpr(fieldNameExpr) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) 
.withColonToken(formatToken(colonToken, 1, 1, 0, 0))
        .withValueExpr(valueExpr)
        .apply();
    }

    // NOTE(review): formatToken's four int arguments appear to control leading/trailing
    // spacing and newlines, but formatToken is defined elsewhere in this file — confirm
    // its exact semantics before relying on the spacing comments below.

    // Formats a defaultable parameter ('type name = default'). The optional
    // parameter name is rewritten only when present; '=' is padded via (1, 1, 0, 0).
    // Skipped entirely when the node is outside the formatting line range.
    @Override
    public DefaultableParameterNode transform(DefaultableParameterNode defaultableParameterNode) {
        if (!isInLineRange(defaultableParameterNode)) {
            return defaultableParameterNode;
        }
        NodeList<AnnotationNode> annotations = this.modifyNodeList(defaultableParameterNode.annotations());
        Node typeName = this.modifyNode(defaultableParameterNode.typeName());
        Token paramName = getToken(defaultableParameterNode.paramName().orElse(null));
        Token equalsToken = getToken(defaultableParameterNode.equalsToken());
        Node expression = this.modifyNode(defaultableParameterNode.expression());
        if (paramName != null) {
            defaultableParameterNode = defaultableParameterNode.modify()
                    .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();
        }
        return defaultableParameterNode.modify()
                .withAnnotations(annotations)
                .withTypeName(typeName)
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a rest parameter ('type... name'); '...' is kept tight and the
    // optional parameter name is padded only when present.
    @Override
    public RestParameterNode transform(RestParameterNode restParameterNode) {
        if (!isInLineRange(restParameterNode)) {
            return restParameterNode;
        }
        NodeList<AnnotationNode> annotations = this.modifyNodeList(restParameterNode.annotations());
        Node typeName = this.modifyNode(restParameterNode.typeName());
        Token ellipsisToken = getToken(restParameterNode.ellipsisToken());
        Token paramName = getToken(restParameterNode.paramName().orElse(null));
        if (paramName != null) {
            restParameterNode = restParameterNode.modify()
                    .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();
        }
        return restParameterNode.modify()
                .withAnnotations(annotations)
                .withTypeName(typeName)
                .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a spread field ('...expr') inside a mapping constructor.
    @Override
    public SpreadFieldNode transform(SpreadFieldNode spreadFieldNode) {
        if (!isInLineRange(spreadFieldNode)) {
            return spreadFieldNode;
        }
        Token ellipsis = getToken(spreadFieldNode.ellipsis());
        ExpressionNode valueExpr =
                this.modifyNode(spreadFieldNode.valueExpr());
        return spreadFieldNode.modify()
                .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))
                .withValueExpr(valueExpr)
                .apply();
    }

    // Formats a named argument ('name = expr').
    @Override
    public NamedArgumentNode transform(NamedArgumentNode namedArgumentNode) {
        if (!isInLineRange(namedArgumentNode)) {
            return namedArgumentNode;
        }
        SimpleNameReferenceNode argumentName = this.modifyNode(namedArgumentNode.argumentName());
        Token equalsToken = getToken(namedArgumentNode.equalsToken());
        ExpressionNode expression = this.modifyNode(namedArgumentNode.expression());
        return namedArgumentNode.modify()
                .withArgumentName(argumentName)
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a rest argument ('...expr').
    @Override
    public RestArgumentNode transform(RestArgumentNode restArgumentNode) {
        if (!isInLineRange(restArgumentNode)) {
            return restArgumentNode;
        }
        Token ellipsis = getToken(restArgumentNode.ellipsis());
        ExpressionNode expression = this.modifyNode(restArgumentNode.expression());
        return restArgumentNode.modify()
                .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats an object type descriptor ('object { ... }').
    @Override
    public ObjectTypeDescriptorNode transform(ObjectTypeDescriptorNode objectTypeDescriptorNode) {
        if (!isInLineRange(objectTypeDescriptorNode)) {
            return objectTypeDescriptorNode;
        }
        NodeList<Token> objectTypeQualifiers = this.modifyNodeList(objectTypeDescriptorNode.objectTypeQualifiers());
        Token objectKeyword = getToken(objectTypeDescriptorNode.objectKeyword());
        Token openBrace = getToken(objectTypeDescriptorNode.openBrace());
        NodeList<Node> members = this.modifyNodeList(objectTypeDescriptorNode.members());
        Token closeBrace = getToken(objectTypeDescriptorNode.closeBrace());
        return objectTypeDescriptorNode.modify()
                .withObjectTypeQualifiers(objectTypeQualifiers)
                .withObjectKeyword(formatToken(objectKeyword, 0, 1, 1, 0))
                .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))
                .withMembers(members)
                .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record type descriptor ('record { ... }'); the optional rest
    // descriptor ('type...;') is only attached when present.
    @Override
    public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
        if (!isInLineRange(recordTypeDescriptorNode)) {
            return recordTypeDescriptorNode;
        }
        Token recordKeyword = getToken(recordTypeDescriptorNode.recordKeyword());
        Token bodyStartDelimiter = getToken(recordTypeDescriptorNode.bodyStartDelimiter());
        NodeList<Node> fields = this.modifyNodeList(recordTypeDescriptorNode.fields());
        RecordRestDescriptorNode recordRestDescriptor =
                modifyNode(recordTypeDescriptorNode.recordRestDescriptor().orElse(null));
        Token bodyEndDelimiter = getToken(recordTypeDescriptorNode.bodyEndDelimiter());
        if (recordRestDescriptor != null) {
            recordTypeDescriptorNode = recordTypeDescriptorNode.modify()
                    .withRecordRestDescriptor(recordRestDescriptor).apply();
        }
        return recordTypeDescriptorNode.modify()
                .withRecordKeyword(formatToken(recordKeyword, 0, 1, 0, 0))
                .withBodyStartDelimiter(formatToken(bodyStartDelimiter, 0, 0, 0, 0))
                .withFields(fields)
                .withBodyEndDelimiter(formatToken(bodyEndDelimiter, 0, 0, 0, 0))
                .apply();
    }

    // Formats an object field. Optional metadata / visibility / readonly parts are
    // applied only when present.
    // NOTE(review): equalsToken and expression may be absent but withEqualsToken /
    // withExpression are invoked unconditionally — assumes formatToken and the
    // modifier tolerate null; confirm against their implementations.
    @Override
    public ObjectFieldNode transform(ObjectFieldNode objectFieldNode) {
        if (!isInLineRange(objectFieldNode)) {
            return objectFieldNode;
        }
        MetadataNode metadata = this.modifyNode(objectFieldNode.metadata().orElse(null));
        Token visibilityQualifier = getToken(objectFieldNode.visibilityQualifier().orElse(null));
        Token readonlyKeyword = getToken(objectFieldNode.readonlyKeyword().orElse(null));
        Node typeName = this.modifyNode(objectFieldNode.typeName());
        Token fieldName = getToken(objectFieldNode.fieldName());
        Token equalsToken = getToken(objectFieldNode.equalsToken().orElse(null));
        ExpressionNode expression = this.modifyNode(objectFieldNode.expression().orElse(null));
        Token semicolonToken = getToken(objectFieldNode.semicolonToken());
        if (metadata != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withMetadata(metadata).apply();
        }
        if (visibilityQualifier != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0)).apply();
        }
        if (readonlyKeyword != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();
        }
        return objectFieldNode.modify()
                .withTypeName(typeName)
                .withFieldName(formatToken(fieldName, 1, 1, 0, 0))
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record field ('type name?;'); optional metadata, readonly and '?'
    // are applied only when present.
    @Override
    public RecordFieldNode transform(RecordFieldNode recordFieldNode) {
        if (!isInLineRange(recordFieldNode)) {
            return recordFieldNode;
        }
        MetadataNode metadata = this.modifyNode(recordFieldNode.metadata().orElse(null));
        Token readonlyKeyword = getToken(recordFieldNode.readonlyKeyword().orElse(null));
        Node typeName = this.modifyNode(recordFieldNode.typeName());
        Token fieldName = getToken(recordFieldNode.fieldName());
        Token questionMarkToken = getToken(recordFieldNode.questionMarkToken().orElse(null));
        Token semicolonToken = getToken(recordFieldNode.semicolonToken());
        if (metadata != null) {
            recordFieldNode = recordFieldNode.modify()
                    .withMetadata(metadata).apply();
        }
        if (readonlyKeyword != null) {
            recordFieldNode = recordFieldNode.modify()
                    .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();
        }
        if (questionMarkToken != null) {
            recordFieldNode = recordFieldNode.modify()
                    .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0)).apply();
        }
        return recordFieldNode.modify()
                .withTypeName(typeName)
                .withFieldName(formatToken(fieldName, 0, 1, 0, 0))
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record field with a default value ('type name = expr;').
    @Override
    public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {
        if (!isInLineRange(recordFieldWithDefaultValueNode)) {
            return recordFieldWithDefaultValueNode;
        }
        MetadataNode metadata = this.modifyNode(recordFieldWithDefaultValueNode.metadata().orElse(null));
        Token readonlyKeyword =
                getToken(recordFieldWithDefaultValueNode.readonlyKeyword().orElse(null));
        Node typeName = this.modifyNode(recordFieldWithDefaultValueNode.typeName());
        Token fieldName = getToken(recordFieldWithDefaultValueNode.fieldName());
        Token equalsToken = getToken(recordFieldWithDefaultValueNode.equalsToken());
        ExpressionNode expression = this.modifyNode(recordFieldWithDefaultValueNode.expression());
        Token semicolonToken = getToken(recordFieldWithDefaultValueNode.semicolonToken());
        if (metadata != null) {
            recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()
                    .withMetadata(metadata).apply();
        }
        if (readonlyKeyword != null) {
            recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()
                    .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();
        }
        return recordFieldWithDefaultValueNode.modify()
                .withTypeName(typeName)
                .withFieldName(formatToken(fieldName, 1, 1, 0, 0))
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record rest descriptor ('type...;').
    @Override
    public RecordRestDescriptorNode transform(RecordRestDescriptorNode recordRestDescriptorNode) {
        if (!isInLineRange(recordRestDescriptorNode)) {
            return recordRestDescriptorNode;
        }
        Node typeName = this.modifyNode(recordRestDescriptorNode.typeName());
        Token ellipsisToken = getToken(recordRestDescriptorNode.ellipsisToken());
        Token semicolonToken = getToken(recordRestDescriptorNode.semicolonToken());
        return recordRestDescriptorNode.modify()
                .withTypeName(typeName)
                .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats an annotation attachment ('@annot {...}').
    @Override
    public AnnotationNode transform(AnnotationNode annotationNode) {
        if (!isInLineRange(annotationNode)) {
            return annotationNode;
        }
        Token atToken = getToken(annotationNode.atToken());
        Node annotReference = this.modifyNode(annotationNode.annotReference());
        MappingConstructorExpressionNode annotValue =
this.modifyNode(annotationNode.annotValue().orElse(null));
        if (annotValue != null) {
            annotationNode = annotationNode.modify()
                    .withAnnotValue(annotValue).apply();
        }
        return annotationNode.modify()
                .withAtToken(formatToken(atToken, 1, 1, 0, 0))
                .withAnnotReference(annotReference)
                .apply();
    }

    // Formats an annotation declaration ('[public] const annotation T tag on ...;').
    // NOTE(review): visibilityQualifier and constKeyword are read without an
    // orElse/null guard, unlike the other optional tokens in this class — confirm
    // they are non-optional in this API version.
    @Override
    public AnnotationDeclarationNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
        if (!isInLineRange(annotationDeclarationNode)) {
            return annotationDeclarationNode;
        }
        MetadataNode metadata = this.modifyNode(annotationDeclarationNode.metadata().orElse(null));
        Token visibilityQualifier = getToken(annotationDeclarationNode.visibilityQualifier());
        Token constKeyword = getToken(annotationDeclarationNode.constKeyword());
        Token annotationKeyword = getToken(annotationDeclarationNode.annotationKeyword());
        Node typeDescriptor = this.modifyNode(annotationDeclarationNode.typeDescriptor());
        Token annotationTag = getToken(annotationDeclarationNode.annotationTag());
        Token onKeyword = getToken(annotationDeclarationNode.onKeyword());
        SeparatedNodeList<Node> attachPoints = this.modifySeparatedNodeList(annotationDeclarationNode.attachPoints());
        Token semicolonToken = getToken(annotationDeclarationNode.semicolonToken());
        if (metadata != null) {
            annotationDeclarationNode = annotationDeclarationNode.modify()
                    .withMetadata(metadata).apply();
        }
        return annotationDeclarationNode.modify()
                .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0))
                .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0))
                .withAnnotationKeyword(formatToken(annotationKeyword, 0, 0, 0, 0))
                .withTypeDescriptor(typeDescriptor)
                .withAnnotationTag(formatToken(annotationTag, 0, 0, 0, 0))
                .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0))
                .withAttachPoints(attachPoints)
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats an annotation attach point ('source ident ident').
    @Override
    public AnnotationAttachPointNode transform(AnnotationAttachPointNode annotationAttachPointNode) {
        if (!isInLineRange(annotationAttachPointNode)) {
            return annotationAttachPointNode;
        }
        Token sourceKeyword = getToken(annotationAttachPointNode.sourceKeyword());
        Token firstIdent = getToken(annotationAttachPointNode.firstIdent());
        Token secondIdent = getToken(annotationAttachPointNode.secondIdent());
        return annotationAttachPointNode.modify()
                .withSourceKeyword(formatToken(sourceKeyword, 0, 1, 0, 0))
                .withFirstIdent(formatToken(firstIdent, 0, 0, 0, 0))
                .withSecondIdent(formatToken(secondIdent, 0, 0, 0, 0))
                .apply();
    }

    // Recurses into a named-worker declarator's init statements and worker declarations.
    @Override
    public NamedWorkerDeclarator transform(NamedWorkerDeclarator namedWorkerDeclarator) {
        if (!isInLineRange(namedWorkerDeclarator)) {
            return namedWorkerDeclarator;
        }
        NodeList<StatementNode> workerInitStatements = this.modifyNodeList(namedWorkerDeclarator.workerInitStatements());
        NodeList<NamedWorkerDeclarationNode> namedWorkerDeclarations =
                this.modifyNodeList(namedWorkerDeclarator.namedWorkerDeclarations());
        return namedWorkerDeclarator.modify()
                .withNamedWorkerDeclarations(namedWorkerDeclarations)
                .withWorkerInitStatements(workerInitStatements)
                .apply();
    }

    // Formats a trap expression ('trap expr').
    @Override
    public TrapExpressionNode transform(TrapExpressionNode trapExpressionNode) {
        if (!isInLineRange(trapExpressionNode)) {
            return trapExpressionNode;
        }
        Token trapKeyword = getToken(trapExpressionNode.trapKeyword());
        ExpressionNode expression = this.modifyNode(trapExpressionNode.expression());
        return trapExpressionNode.modify()
                .withTrapKeyword(formatToken(trapKeyword, 0, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a type cast ('<T>expr'); angle brackets kept tight.
    @Override
    public TypeCastExpressionNode transform(TypeCastExpressionNode typeCastExpressionNode) {
        if (!isInLineRange(typeCastExpressionNode)) {
            return typeCastExpressionNode;
        }
        Token ltToken = getToken(typeCastExpressionNode.ltToken());
        TypeCastParamNode typeCastParam = this.modifyNode(typeCastExpressionNode.typeCastParam());
        Token gtToken = getToken(typeCastExpressionNode.gtToken());
        ExpressionNode expression = this.modifyNode(typeCastExpressionNode.expression());
        return typeCastExpressionNode.modify()
                .withLtToken(formatToken(ltToken, 0, 0, 0, 0))
                .withTypeCastParam(typeCastParam)
                .withGtToken(formatToken(gtToken, 0, 0, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Recurses into a type-cast parameter (annotations + type).
    @Override
    public TypeCastParamNode transform(TypeCastParamNode typeCastParamNode) {
        if (!isInLineRange(typeCastParamNode)) {
            return typeCastParamNode;
        }
        NodeList<AnnotationNode> annotations = this.modifyNodeList(typeCastParamNode.annotations());
        Node type = this.modifyNode(typeCastParamNode.type());
        return typeCastParamNode.modify()
                .withAnnotations(annotations)
                .withType(type)
                .apply();
    }

    // Formats a table constructor ('table key(...) [ {...}, ... ]').
    // NOTE(review): keySpecifier may be null yet withKeySpecifier is invoked
    // unconditionally — assumes the modifier tolerates null; confirm.
    @Override
    public TableConstructorExpressionNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
        if (!isInLineRange(tableConstructorExpressionNode)) {
            return tableConstructorExpressionNode;
        }
        Token tableKeyword = getToken(tableConstructorExpressionNode.tableKeyword());
        KeySpecifierNode keySpecifier = this.modifyNode(tableConstructorExpressionNode.keySpecifier().orElse(null));
        Token openBracket = getToken(tableConstructorExpressionNode.openBracket());
        SeparatedNodeList<Node> mappingConstructors =
                this.modifySeparatedNodeList(tableConstructorExpressionNode.mappingConstructors());
        Token closeBracket = this.modifyToken(tableConstructorExpressionNode.closeBracket());
        return tableConstructorExpressionNode.modify()
                .withTableKeyword(formatToken(tableKeyword, 0, 1, 0, 0))
                .withKeySpecifier(keySpecifier)
                .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0))
                .withMappingConstructors(mappingConstructors)
                .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0))
                .apply();
    }

    // Formats a key specifier ('key(a, b)').
    @Override
    public KeySpecifierNode transform(KeySpecifierNode keySpecifierNode) {
        if (!isInLineRange(keySpecifierNode)) {
            return keySpecifierNode;
        }
        Token keyKeyword = getToken(keySpecifierNode.keyKeyword());
        Token openParenToken = getToken(keySpecifierNode.openParenToken());
        SeparatedNodeList<IdentifierToken> fieldNames = this.modifySeparatedNodeList(keySpecifierNode.fieldNames());
        Token closeParenToken =
                getToken(keySpecifierNode.closeParenToken());
        return keySpecifierNode.modify()
                .withKeyKeyword(formatToken(keyKeyword, 0, 1, 0, 0))
                .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))
                .withFieldNames(fieldNames)
                .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats error type parameters ('error<T>').
    @Override
    public ErrorTypeParamsNode transform(ErrorTypeParamsNode errorTypeParamsNode) {
        if (!isInLineRange(errorTypeParamsNode)) {
            return errorTypeParamsNode;
        }
        Token ltToken = getToken(errorTypeParamsNode.ltToken());
        Node parameter = this.modifyNode(errorTypeParamsNode.parameter());
        Token gtToken = getToken(errorTypeParamsNode.gtToken());
        return errorTypeParamsNode.modify()
                .withLtToken(formatToken(ltToken, 0, 0, 0, 0))
                .withParameter(parameter)
                .withGtToken(formatToken(gtToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a stream type descriptor ('stream<...>'); type params are optional.
    @Override
    public StreamTypeDescriptorNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {
        if (!isInLineRange(streamTypeDescriptorNode)) {
            return streamTypeDescriptorNode;
        }
        Token streamKeywordToken = getToken(streamTypeDescriptorNode.streamKeywordToken());
        Node streamTypeParamsNode = this.modifyNode(streamTypeDescriptorNode.streamTypeParamsNode().orElse(null));
        if (streamTypeParamsNode != null) {
            streamTypeDescriptorNode = streamTypeDescriptorNode.modify()
                    .withStreamTypeParamsNode(streamTypeParamsNode).apply();
        }
        return streamTypeDescriptorNode.modify()
                .withStreamKeywordToken(formatToken(streamKeywordToken, 0, 1, 0, 0))
                .apply();
    }

    // Formats stream type params ('<T, E>'); the comma and the right type are optional.
    @Override
    public StreamTypeParamsNode transform(StreamTypeParamsNode streamTypeParamsNode) {
        if (!isInLineRange(streamTypeParamsNode)) {
            return streamTypeParamsNode;
        }
        Token ltToken = getToken(streamTypeParamsNode.ltToken());
        Node leftTypeDescNode = this.modifyNode(streamTypeParamsNode.leftTypeDescNode());
        Token commaToken = getToken(streamTypeParamsNode.commaToken().orElse(null));
        Node rightTypeDescNode = this.modifyNode(streamTypeParamsNode.rightTypeDescNode().orElse(null));
        Token gtToken =
                getToken(streamTypeParamsNode.gtToken());
        if (commaToken != null) {
            streamTypeParamsNode = streamTypeParamsNode.modify()
                    .withCommaToken(formatToken(commaToken, 0, 1, 0, 0)).apply();
        }
        if (rightTypeDescNode != null) {
            streamTypeParamsNode = streamTypeParamsNode.modify()
                    .withRightTypeDescNode(rightTypeDescNode).apply();
        }
        return streamTypeParamsNode.modify()
                .withLtToken(formatToken(ltToken, 0, 0, 0, 0))
                .withLeftTypeDescNode(leftTypeDescNode)
                .withGtToken(formatToken(gtToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a typedesc type descriptor ('typedesc<...>'); params optional.
    @Override
    public TypedescTypeDescriptorNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {
        if (!isInLineRange(typedescTypeDescriptorNode)) {
            return typedescTypeDescriptorNode;
        }
        Token typedescKeywordToken = this.modifyToken(typedescTypeDescriptorNode.typedescKeywordToken());
        TypeParameterNode typedescTypeParamsNode =
                this.modifyNode(typedescTypeDescriptorNode.typedescTypeParamsNode().orElse(null));
        if (typedescTypeParamsNode != null) {
            typedescTypeDescriptorNode = typedescTypeDescriptorNode.modify()
                    .withTypedescTypeParamsNode(typedescTypeParamsNode).apply();
        }
        return typedescTypeDescriptorNode.modify()
                .withTypedescKeywordToken(formatToken(typedescKeywordToken, 0, 1, 0, 0))
                .apply();
    }

    // Formats a let expression ('let decls in expr'); 'in' is space-padded.
    @Override
    public LetExpressionNode transform(LetExpressionNode letExpressionNode) {
        if (!isInLineRange(letExpressionNode)) {
            return letExpressionNode;
        }
        Token letKeyword = getToken(letExpressionNode.letKeyword());
        SeparatedNodeList<LetVariableDeclarationNode> letVarDeclarations =
                this.modifySeparatedNodeList(letExpressionNode.letVarDeclarations());
        Token inKeyword = getToken(letExpressionNode.inKeyword());
        ExpressionNode expression = this.modifyNode(letExpressionNode.expression());
        return letExpressionNode.modify()
                .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0))
                .withLetVarDeclarations(letVarDeclarations)
                .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a single let variable declaration ('T x = expr').
    @Override
    public LetVariableDeclarationNode
transform(LetVariableDeclarationNode letVariableDeclarationNode) {
        if (!isInLineRange(letVariableDeclarationNode)) {
            return letVariableDeclarationNode;
        }
        NodeList<AnnotationNode> annotations = this.modifyNodeList(letVariableDeclarationNode.annotations());
        TypedBindingPatternNode typedBindingPattern =
                this.modifyNode(letVariableDeclarationNode.typedBindingPattern());
        Token equalsToken = getToken(letVariableDeclarationNode.equalsToken());
        ExpressionNode expression = this.modifyNode(letVariableDeclarationNode.expression());
        return letVariableDeclarationNode.modify()
                .withAnnotations(annotations)
                .withTypedBindingPattern(typedBindingPattern)
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a template interpolation ('${expr}'); delimiters kept tight.
    @Override
    public InterpolationNode transform(InterpolationNode interpolationNode) {
        if (!isInLineRange(interpolationNode)) {
            return interpolationNode;
        }
        Token interpolationStartToken = getToken(interpolationNode.interpolationStartToken());
        ExpressionNode expression = this.modifyNode(interpolationNode.expression());
        Token interpolationEndToken = getToken(interpolationNode.interpolationEndToken());
        return interpolationNode.modify()
                .withInterpolationStartToken(formatToken(interpolationStartToken, 0, 0, 0, 0))
                .withExpression(expression)
                .withInterpolationEndToken(formatToken(interpolationEndToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a table type descriptor ('table<Row> key ...').
    // NOTE(review): keyConstraintNode is read without an orElse/null guard, unlike
    // other optional children in this class — confirm it is non-optional in this
    // API version.
    @Override
    public TableTypeDescriptorNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {
        if (!isInLineRange(tableTypeDescriptorNode)) {
            return tableTypeDescriptorNode;
        }
        Token tableKeywordToken = getToken(tableTypeDescriptorNode.tableKeywordToken());
        Node rowTypeParameterNode = this.modifyNode(tableTypeDescriptorNode.rowTypeParameterNode());
        Node keyConstraintNode = this.modifyNode(tableTypeDescriptorNode.keyConstraintNode());
        return tableTypeDescriptorNode.modify()
                .withTableKeywordToken(formatToken(tableKeywordToken, 0, 1, 0, 0))
                .withRowTypeParameterNode(rowTypeParameterNode)
                .withKeyConstraintNode(keyConstraintNode)
                .apply();
    }

    // Formats a key type constraint ('key<T>').
    @Override
    public KeyTypeConstraintNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {
        if (!isInLineRange(keyTypeConstraintNode)) {
            return keyTypeConstraintNode;
        }
        Token keyKeywordToken = getToken(keyTypeConstraintNode.keyKeywordToken());
        Node typeParameterNode = this.modifyNode(keyTypeConstraintNode.typeParameterNode());
        return keyTypeConstraintNode.modify()
                .withKeyKeywordToken(formatToken(keyKeywordToken, 0, 1, 0, 0))
                .withTypeParameterNode(typeParameterNode)
                .apply();
    }

    // Formats a function type descriptor ('function (...) returns T').
    @Override
    public FunctionTypeDescriptorNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {
        if (!isInLineRange(functionTypeDescriptorNode)) {
            return functionTypeDescriptorNode;
        }
        Token functionKeyword = getToken(functionTypeDescriptorNode.functionKeyword());
        FunctionSignatureNode functionSignature = this.modifyNode(functionTypeDescriptorNode.functionSignature());
        return functionTypeDescriptorNode.modify()
                .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))
                .withFunctionSignature(functionSignature)
                .apply();
    }

    // Formats an explicit anonymous function expression ('function (...) { ... }').
    @Override
    public ExplicitAnonymousFunctionExpressionNode transform(
            ExplicitAnonymousFunctionExpressionNode explicitAnonymousFunctionExpressionNode) {
        if (!isInLineRange(explicitAnonymousFunctionExpressionNode)) {
            return explicitAnonymousFunctionExpressionNode;
        }
        NodeList<AnnotationNode> annotations =
                this.modifyNodeList(explicitAnonymousFunctionExpressionNode.annotations());
        Token functionKeyword = getToken(explicitAnonymousFunctionExpressionNode.functionKeyword());
        FunctionSignatureNode functionSignature =
                this.modifyNode(explicitAnonymousFunctionExpressionNode.functionSignature());
        FunctionBodyNode functionBody = this.modifyNode(explicitAnonymousFunctionExpressionNode.functionBody());
        return explicitAnonymousFunctionExpressionNode.modify()
                .withAnnotations(annotations)
                .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))
                .withFunctionSignature(functionSignature)
                .withFunctionBody(functionBody)
                .apply();
    }

    // Formats an expression function body ('=> expr[;]'); the semicolon is optional.
    @Override
    public ExpressionFunctionBodyNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
        if (!isInLineRange(expressionFunctionBodyNode)) {
            return expressionFunctionBodyNode;
        }
        Token rightDoubleArrow = getToken(expressionFunctionBodyNode.rightDoubleArrow());
        ExpressionNode expression = this.modifyNode(expressionFunctionBodyNode.expression());
        Token semicolon = this.modifyToken(expressionFunctionBodyNode.semicolon().orElse(null));
        if (semicolon != null) {
            expressionFunctionBodyNode = expressionFunctionBodyNode.modify()
                    .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)).apply();
        }
        return expressionFunctionBodyNode.modify()
                .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a parenthesised type descriptor ('(T)').
    @Override
    public ParenthesisedTypeDescriptorNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
        if (!isInLineRange(parenthesisedTypeDescriptorNode)) {
            return parenthesisedTypeDescriptorNode;
        }
        Token openParenToken = getToken(parenthesisedTypeDescriptorNode.openParenToken());
        TypeDescriptorNode typedesc = this.modifyNode(parenthesisedTypeDescriptorNode.typedesc());
        Token closeParenToken = getToken(parenthesisedTypeDescriptorNode.closeParenToken());
        return parenthesisedTypeDescriptorNode.modify()
                .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))
                .withTypedesc(typedesc)
                .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats an implicit new expression ('new (...)'); the arg list is optional.
    @Override
    public ImplicitNewExpressionNode transform(ImplicitNewExpressionNode implicitNewExpressionNode) {
        if (!isInLineRange(implicitNewExpressionNode)) {
            return implicitNewExpressionNode;
        }
        Token newKeyword = getToken(implicitNewExpressionNode.newKeyword());
        ParenthesizedArgList parenthesizedArgList =
                this.modifyNode(implicitNewExpressionNode.parenthesizedArgList().orElse(null));
        if (parenthesizedArgList != null) {
            implicitNewExpressionNode = implicitNewExpressionNode.modify()
.withParenthesizedArgList(parenthesizedArgList).apply(); } return implicitNewExpressionNode.modify() .withNewKeyword(formatToken(newKeyword, 0, 1, 0, 0)) .apply(); } @Override public QueryConstructTypeNode transform(QueryConstructTypeNode queryConstructTypeNode) { if (!isInLineRange(queryConstructTypeNode)) { return queryConstructTypeNode; } Token keyword = getToken(queryConstructTypeNode.keyword()); KeySpecifierNode keySpecifier = this.modifyNode(queryConstructTypeNode.keySpecifier().orElse(null)); if (keySpecifier != null) { queryConstructTypeNode = queryConstructTypeNode.modify() .withKeySpecifier(keySpecifier).apply(); } return queryConstructTypeNode.modify() .withKeyword(formatToken(keyword, 0, 0, 0, 0)) .apply(); } @Override public FromClauseNode transform(FromClauseNode fromClauseNode) { if (!isInLineRange(fromClauseNode)) { return fromClauseNode; } Token fromKeyword = getToken(fromClauseNode.fromKeyword()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(fromClauseNode.typedBindingPattern()); Token inKeyword = getToken(fromClauseNode.inKeyword()); ExpressionNode expression = this.modifyNode(fromClauseNode.expression()); return fromClauseNode.modify() .withFromKeyword(formatToken(fromKeyword, 1, 1, 0, 0)) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(formatToken(inKeyword, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public WhereClauseNode transform(WhereClauseNode whereClauseNode) { if (!isInLineRange(whereClauseNode)) { return whereClauseNode; } Token whereKeyword = getToken(whereClauseNode.whereKeyword()); ExpressionNode expression = this.modifyNode(whereClauseNode.expression()); return whereClauseNode.modify() .withWhereKeyword(formatToken(whereKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public LetClauseNode transform(LetClauseNode letClauseNode) { if (!isInLineRange(letClauseNode)) { return letClauseNode; } Token letKeyword = getToken(letClauseNode.letKeyword()); 
SeparatedNodeList<LetVariableDeclarationNode> letVarDeclarations =
                this.modifySeparatedNodeList(letClauseNode.letVarDeclarations());
        return letClauseNode.modify()
                .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0))
                .withLetVarDeclarations(letVarDeclarations)
                .apply();
    }

    // Recurses into a query pipeline (from clause + intermediate clauses).
    @Override
    public QueryPipelineNode transform(QueryPipelineNode queryPipelineNode) {
        if (!isInLineRange(queryPipelineNode)) {
            return queryPipelineNode;
        }
        FromClauseNode fromClause = this.modifyNode(queryPipelineNode.fromClause());
        NodeList<ClauseNode> intermediateClauses = this.modifyNodeList(queryPipelineNode.intermediateClauses());
        return queryPipelineNode.modify()
                .withFromClause(fromClause)
                .withIntermediateClauses(intermediateClauses)
                .apply();
    }

    // Formats a select clause ('select expr').
    @Override
    public SelectClauseNode transform(SelectClauseNode selectClauseNode) {
        if (!isInLineRange(selectClauseNode)) {
            return selectClauseNode;
        }
        Token selectKeyword = getToken(selectClauseNode.selectKeyword());
        ExpressionNode expression = this.modifyNode(selectClauseNode.expression());
        return selectClauseNode.modify()
                .withSelectKeyword(formatToken(selectKeyword, 0, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a query expression; construct type, on-conflict and limit clauses
    // are optional and only attached when present.
    @Override
    public QueryExpressionNode transform(QueryExpressionNode queryExpressionNode) {
        if (!isInLineRange(queryExpressionNode)) {
            return queryExpressionNode;
        }
        QueryConstructTypeNode queryConstructType =
                this.modifyNode(queryExpressionNode.queryConstructType().orElse(null));
        QueryPipelineNode queryPipeline = this.modifyNode(queryExpressionNode.queryPipeline());
        SelectClauseNode selectClause = this.modifyNode(queryExpressionNode.selectClause());
        OnConflictClauseNode onConflictClause = this.modifyNode(queryExpressionNode.onConflictClause().orElse(null));
        LimitClauseNode limitClause = this.modifyNode(queryExpressionNode.limitClause().orElse(null));
        if (queryConstructType != null) {
            queryExpressionNode = queryExpressionNode.modify()
                    .withQueryConstructType(queryConstructType).apply();
        }
        if (onConflictClause != null) {
            queryExpressionNode = queryExpressionNode.modify()
                    .withOnConflictClause(onConflictClause).apply();
        }
        if (limitClause != null) {
            queryExpressionNode = queryExpressionNode.modify()
                    .withLimitClause(limitClause).apply();
        }
        return queryExpressionNode.modify()
                .withQueryPipeline(queryPipeline)
                .withSelectClause(selectClause)
                .apply();
    }

    // Formats an intersection type ('A & B'); '&' is space-padded.
    @Override
    public IntersectionTypeDescriptorNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {
        if (!isInLineRange(intersectionTypeDescriptorNode)) {
            return intersectionTypeDescriptorNode;
        }
        Node leftTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.leftTypeDesc());
        Token bitwiseAndToken = getToken(intersectionTypeDescriptorNode.bitwiseAndToken());
        Node rightTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.rightTypeDesc());
        return intersectionTypeDescriptorNode.modify()
                .withLeftTypeDesc(leftTypeDesc)
                .withBitwiseAndToken(formatToken(bitwiseAndToken, 1, 1, 0, 0))
                .withRightTypeDesc(rightTypeDesc)
                .apply();
    }

    // Formats implicit anonymous function parameters ('(a, b)').
    @Override
    public ImplicitAnonymousFunctionParameters transform(
            ImplicitAnonymousFunctionParameters implicitAnonymousFunctionParameters) {
        if (!isInLineRange(implicitAnonymousFunctionParameters)) {
            return implicitAnonymousFunctionParameters;
        }
        Token openParenToken = getToken(implicitAnonymousFunctionParameters.openParenToken());
        SeparatedNodeList<SimpleNameReferenceNode> parameters =
                this.modifySeparatedNodeList(implicitAnonymousFunctionParameters.parameters());
        Token closeParenToken = getToken(implicitAnonymousFunctionParameters.closeParenToken());
        return implicitAnonymousFunctionParameters.modify()
                .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0))
                .withParameters(parameters)
                .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats an implicit anonymous function ('params => expr').
    @Override
    public ImplicitAnonymousFunctionExpressionNode transform(
            ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
        if (!isInLineRange(implicitAnonymousFunctionExpressionNode)) {
            return implicitAnonymousFunctionExpressionNode;
        }
        Node params = this.modifyNode(implicitAnonymousFunctionExpressionNode.params());
        Token rightDoubleArrow = getToken(implicitAnonymousFunctionExpressionNode.rightDoubleArrow());
        ExpressionNode expression = this.modifyNode(implicitAnonymousFunctionExpressionNode.expression());
        return implicitAnonymousFunctionExpressionNode.modify()
                .withParams(params)
                .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Recurses into a singleton type descriptor's constant expression.
    @Override
    public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
        if (!isInLineRange(singletonTypeDescriptorNode)) {
            return singletonTypeDescriptorNode;
        }
        ExpressionNode simpleContExprNode = this.modifyNode(singletonTypeDescriptorNode.simpleContExprNode());
        return singletonTypeDescriptorNode.modify()
                .withSimpleContExprNode(simpleContExprNode)
                .apply();
    }

    // Formats a method declaration (no body); metadata is optional.
    @Override
    public MethodDeclarationNode transform(MethodDeclarationNode methodDeclarationNode) {
        if (!isInLineRange(methodDeclarationNode)) {
            return methodDeclarationNode;
        }
        MetadataNode metadata = this.modifyNode(methodDeclarationNode.metadata().orElse(null));
        NodeList<Token> qualifierList = this.modifyNodeList(methodDeclarationNode.qualifierList());
        Token functionKeyword = getToken(methodDeclarationNode.functionKeyword());
        IdentifierToken methodName = this.modifyNode(methodDeclarationNode.methodName());
        FunctionSignatureNode methodSignature = this.modifyNode(methodDeclarationNode.methodSignature());
        Token semicolon = getToken(methodDeclarationNode.semicolon());
        if (metadata != null) {
            methodDeclarationNode = methodDeclarationNode.modify()
                    .withMetadata(metadata).apply();
        }
        return methodDeclarationNode.modify()
                .withQualifierList(qualifierList)
                .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0))
                .withMethodName(methodName)
                .withMethodSignature(methodSignature)
                .withSemicolon(formatToken(semicolon, 0, 0, 0, 0))
                .apply();
    }

    // Formats a wildcard binding pattern ('_').
    @Override
    public WildcardBindingPatternNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
        if (!isInLineRange(wildcardBindingPatternNode)) {
            return wildcardBindingPatternNode;
        }
        Token underscoreToken = getToken(wildcardBindingPatternNode.underscoreToken());
        return wildcardBindingPatternNode.modify()
                .withUnderscoreToken(formatToken(underscoreToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats an error binding pattern ('error T (args)'); type reference optional.
    @Override
    public ErrorBindingPatternNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
        if (!isInLineRange(errorBindingPatternNode)) {
            return errorBindingPatternNode;
        }
        Token errorKeyword = getToken(errorBindingPatternNode.errorKeyword());
        Node typeReference = this.modifyNode(errorBindingPatternNode.typeReference().orElse(null));
        Token openParenthesis = getToken(errorBindingPatternNode.openParenthesis());
        SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                this.modifySeparatedNodeList(errorBindingPatternNode.argListBindingPatterns());
        Token closeParenthesis = getToken(errorBindingPatternNode.closeParenthesis());
        return errorBindingPatternNode.modify()
                .withErrorKeyword(formatToken(errorKeyword, 0, 1, 0, 0))
                .withTypeReference(typeReference)
                .withOpenParenthesis(formatToken(openParenthesis, 0, 0, 0, 0))
                .withArgListBindingPatterns(argListBindingPatterns)
                .withCloseParenthesis(formatToken(closeParenthesis, 0, 0, 0, 0))
                .apply();
    }

    // Formats a named-arg binding pattern ('name = pattern').
    @Override
    public NamedArgBindingPatternNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
        if (!isInLineRange(namedArgBindingPatternNode)) {
            return namedArgBindingPatternNode;
        }
        IdentifierToken argName = this.modifyNode(namedArgBindingPatternNode.argName());
        Token equalsToken = getToken(namedArgBindingPatternNode.equalsToken());
        BindingPatternNode bindingPattern = this.modifyNode(namedArgBindingPatternNode.bindingPattern());
        return namedArgBindingPatternNode.modify()
                .withArgName(argName)
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withBindingPattern(bindingPattern)
                .apply();
    }

    // Formats an async send action ('expr -> worker').
    @Override
    public AsyncSendActionNode transform(AsyncSendActionNode asyncSendActionNode) {
        if (!isInLineRange(asyncSendActionNode)) {
            return asyncSendActionNode;
        }
        ExpressionNode expression = this.modifyNode(asyncSendActionNode.expression());
        Token rightArrowToken = getToken(asyncSendActionNode.rightArrowToken());
        SimpleNameReferenceNode peerWorker = this.modifyNode(asyncSendActionNode.peerWorker());
        return asyncSendActionNode.modify()
                .withExpression(expression)
                .withRightArrowToken(formatToken(rightArrowToken, 1, 1, 0, 0))
                .withPeerWorker(peerWorker)
                .apply();
    }

    // Formats a sync send action ('expr ->> worker').
    @Override
    public SyncSendActionNode transform(SyncSendActionNode syncSendActionNode) {
        if (!isInLineRange(syncSendActionNode)) {
            return syncSendActionNode;
        }
        ExpressionNode expression = this.modifyNode(syncSendActionNode.expression());
        Token syncSendToken = getToken(syncSendActionNode.syncSendToken());
        SimpleNameReferenceNode peerWorker = this.modifyNode(syncSendActionNode.peerWorker());
        return syncSendActionNode.modify()
                .withExpression(expression)
                .withSyncSendToken(formatToken(syncSendToken, 1, 1, 0, 0))
                .withPeerWorker(peerWorker)
                .apply();
    }

    // Formats a receive action ('<- worker').
    @Override
    public ReceiveActionNode transform(ReceiveActionNode receiveActionNode) {
        if (!isInLineRange(receiveActionNode)) {
            return receiveActionNode;
        }
        Token leftArrow = getToken(receiveActionNode.leftArrow());
        SimpleNameReferenceNode receiveWorkers = this.modifyNode(receiveActionNode.receiveWorkers());
        return receiveActionNode.modify()
                .withLeftArrow(formatToken(leftArrow, 1, 1, 0, 0))
                .withReceiveWorkers(receiveWorkers)
                .apply();
    }

    // Formats a multi-worker receive field list ('{ w1, w2 }').
    @Override
    public ReceiveFieldsNode transform(ReceiveFieldsNode receiveFieldsNode) {
        if (!isInLineRange(receiveFieldsNode)) {
            return receiveFieldsNode;
        }
        Token openBrace = getToken(receiveFieldsNode.openBrace());
        SeparatedNodeList<NameReferenceNode> receiveFields =
                this.modifySeparatedNodeList(receiveFieldsNode.receiveFields());
        Token closeBrace = getToken(receiveFieldsNode.closeBrace());
        return receiveFieldsNode.modify()
                .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))
                .withReceiveFields(receiveFields)
                .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))
                .apply();
    }

    // Formats a rest descriptor ('T...').
    @Override
    public RestDescriptorNode transform(RestDescriptorNode restDescriptorNode) {
        if (!isInLineRange(restDescriptorNode)) {
            return restDescriptorNode;
        }
        TypeDescriptorNode typeDescriptor = this.modifyNode(restDescriptorNode.typeDescriptor());
        Token ellipsisToken = getToken(restDescriptorNode.ellipsisToken());
        return restDescriptorNode.modify()
                .withTypeDescriptor(typeDescriptor)
                .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a '>>' token pair; both halves kept tight.
    @Override
    public DoubleGTTokenNode transform(DoubleGTTokenNode doubleGTTokenNode) {
        if (!isInLineRange(doubleGTTokenNode)) {
            return doubleGTTokenNode;
        }
        Token openGTToken = getToken(doubleGTTokenNode.openGTToken());
        Token endGTToken = getToken(doubleGTTokenNode.endGTToken());
        return doubleGTTokenNode.modify()
                .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0))
                .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a '>>>' token triple; all parts kept tight.
    @Override
    public TrippleGTTokenNode transform(TrippleGTTokenNode trippleGTTokenNode) {
        if (!isInLineRange(trippleGTTokenNode)) {
            return trippleGTTokenNode;
        }
        Token openGTToken = getToken(trippleGTTokenNode.openGTToken());
        Token middleGTToken = getToken(trippleGTTokenNode.middleGTToken());
        Token endGTToken = getToken(trippleGTTokenNode.endGTToken());
        return trippleGTTokenNode.modify()
                .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0))
                .withMiddleGTToken(formatToken(middleGTToken, 0, 0, 0, 0))
                .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a wait action ('wait futureExpr').
    @Override
    public WaitActionNode transform(WaitActionNode waitActionNode) {
        if (!isInLineRange(waitActionNode)) {
            return waitActionNode;
        }
        Token waitKeyword = getToken(waitActionNode.waitKeyword());
        Node waitFutureExpr = this.modifyNode(waitActionNode.waitFutureExpr());
        return waitActionNode.modify()
                .withWaitKeyword(formatToken(waitKeyword, 1, 1, 0, 0))
                .withWaitFutureExpr(waitFutureExpr)
                .apply();
    }

    // Formats a wait field list ('{ f1, f2 }').
    @Override
    public WaitFieldsListNode transform(WaitFieldsListNode waitFieldsListNode) {
        if (!isInLineRange(waitFieldsListNode)) {
            return waitFieldsListNode;
        }
        Token openBrace = getToken(waitFieldsListNode.openBrace());
        SeparatedNodeList<Node> waitFields = this.modifySeparatedNodeList(waitFieldsListNode.waitFields());
        Token closeBrace = getToken(waitFieldsListNode.closeBrace());
        return waitFieldsListNode.modify()
                .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))
                .withWaitFields(waitFields)
                .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))
                .apply();
    }

    // Formats a single wait field ('name : futureExpr').
    @Override
    public WaitFieldNode transform(WaitFieldNode waitFieldNode) {
        if (!isInLineRange(waitFieldNode)) {
            return waitFieldNode;
        }
        SimpleNameReferenceNode fieldName = this.modifyNode(waitFieldNode.fieldName());
        Token colon = getToken(waitFieldNode.colon());
        ExpressionNode waitFutureExpr = this.modifyNode(waitFieldNode.waitFutureExpr());
        return waitFieldNode.modify()
                .withFieldName(fieldName)
                .withColon(formatToken(colon, 1, 1, 0, 0))
                .withWaitFutureExpr(waitFutureExpr)
                .apply();
    }

    // Formats an annotation access expression ('expr.@tag').
    @Override
    public AnnotAccessExpressionNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {
        if (!isInLineRange(annotAccessExpressionNode)) {
            return annotAccessExpressionNode;
        }
        ExpressionNode expression = this.modifyNode(annotAccessExpressionNode.expression());
        Token annotChainingToken = getToken(annotAccessExpressionNode.annotChainingToken());
        NameReferenceNode annotTagReference = this.modifyNode(annotAccessExpressionNode.annotTagReference());
        return annotAccessExpressionNode.modify()
                .withExpression(expression)
                .withAnnotChainingToken(formatToken(annotChainingToken, 0, 0, 0, 0))
                .withAnnotTagReference(annotTagReference)
                .apply();
    }

    // Formats a query action ('from ... do { ... }').
    @Override
    public QueryActionNode transform(QueryActionNode queryActionNode) {
        if (!isInLineRange(queryActionNode)) {
            return queryActionNode;
        }
        QueryPipelineNode queryPipeline = this.modifyNode(queryActionNode.queryPipeline());
        Token doKeyword = getToken(queryActionNode.doKeyword());
        BlockStatementNode blockStatement =
this.modifyNode(queryActionNode.blockStatement()); LimitClauseNode limitClause = this.modifyNode(queryActionNode.limitClause().orElse(null)); if (limitClause != null) { queryActionNode = queryActionNode.modify() .withLimitClause(limitClause).apply(); } return queryActionNode.modify() .withQueryPipeline(queryPipeline) .withDoKeyword(formatToken(doKeyword, 1, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public OptionalFieldAccessExpressionNode transform( OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) { if (!isInLineRange(optionalFieldAccessExpressionNode)) { return optionalFieldAccessExpressionNode; } ExpressionNode expression = this.modifyNode(optionalFieldAccessExpressionNode.expression()); Token optionalChainingToken = getToken(optionalFieldAccessExpressionNode.optionalChainingToken()); NameReferenceNode fieldName = this.modifyNode(optionalFieldAccessExpressionNode.fieldName()); return optionalFieldAccessExpressionNode.modify() .withExpression(expression) .withOptionalChainingToken(formatToken(optionalChainingToken, 0, 0, 0, 0)) .withFieldName(fieldName) .apply(); } @Override public ConditionalExpressionNode transform(ConditionalExpressionNode conditionalExpressionNode) { if (!isInLineRange(conditionalExpressionNode)) { return conditionalExpressionNode; } ExpressionNode lhsExpression = this.modifyNode(conditionalExpressionNode.lhsExpression()); Token questionMarkToken = getToken(conditionalExpressionNode.questionMarkToken()); ExpressionNode middleExpression = this.modifyNode(conditionalExpressionNode.middleExpression()); Token colonToken = getToken(conditionalExpressionNode.colonToken()); ExpressionNode endExpression = this.modifyNode(conditionalExpressionNode.endExpression()); return conditionalExpressionNode.modify() .withLhsExpression(lhsExpression) .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0)) .withMiddleExpression(middleExpression) .withColonToken(formatToken(colonToken, 1, 1, 0, 0)) 
.withEndExpression(endExpression) .apply(); } @Override public EnumDeclarationNode transform(EnumDeclarationNode enumDeclarationNode) { if (!isInLineRange(enumDeclarationNode)) { return enumDeclarationNode; } MetadataNode metadata = this.modifyNode(enumDeclarationNode.metadata().orElse(null)); Token qualifier = getToken(enumDeclarationNode.qualifier()); Token enumKeywordToken = getToken(enumDeclarationNode.enumKeywordToken()); IdentifierToken identifier = this.modifyNode(enumDeclarationNode.identifier()); Token openBraceToken = getToken(enumDeclarationNode.openBraceToken()); SeparatedNodeList<Node> enumMemberList = this.modifySeparatedNodeList(enumDeclarationNode.enumMemberList()); Token closeBraceToken = getToken(enumDeclarationNode.closeBraceToken()); if (metadata != null) { enumDeclarationNode = enumDeclarationNode.modify() .withMetadata(metadata).apply(); } return enumDeclarationNode.modify() .withQualifier(formatToken(qualifier, 1, 1, 0, 0)) .withEnumKeywordToken(formatToken(enumKeywordToken, 0, 1, 0, 0)) .withIdentifier(identifier) .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0)) .withEnumMemberList(enumMemberList) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0)) .apply(); } @Override public EnumMemberNode transform(EnumMemberNode enumMemberNode) { if (!isInLineRange(enumMemberNode)) { return enumMemberNode; } MetadataNode metadata = this.modifyNode(enumMemberNode.metadata().orElse(null)); IdentifierToken identifier = this.modifyNode(enumMemberNode.identifier()); Token equalToken = getToken(enumMemberNode.equalToken().orElse(null)); ExpressionNode constExprNode = this.modifyNode(enumMemberNode.constExprNode().orElse(null)); if (metadata != null) { enumMemberNode = enumMemberNode.modify() .withMetadata(metadata).apply(); } return enumMemberNode.modify() .withEqualToken(formatToken(equalToken, 1, 1, 0, 0)) .withIdentifier(identifier) .withConstExprNode(constExprNode) .apply(); } @Override public TransactionStatementNode 
transform(TransactionStatementNode transactionStatementNode) { if (!isInLineRange(transactionStatementNode)) { return transactionStatementNode; } Token transactionKeyword = getToken(transactionStatementNode.transactionKeyword()); BlockStatementNode blockStatement = this.modifyNode(transactionStatementNode.blockStatement()); return transactionStatementNode.modify() .withTransactionKeyword(formatToken(transactionKeyword, 1, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public RollbackStatementNode transform(RollbackStatementNode rollbackStatementNode) { if (!isInLineRange(rollbackStatementNode)) { return rollbackStatementNode; } Token rollbackKeyword = getToken(rollbackStatementNode.rollbackKeyword()); ExpressionNode expression = this.modifyNode(rollbackStatementNode.expression().orElse(null)); Token semicolon = getToken(rollbackStatementNode.semicolon()); if (expression != null) { rollbackStatementNode = rollbackStatementNode.modify() .withExpression(expression).apply(); } return rollbackStatementNode.modify() .withRollbackKeyword(formatToken(rollbackKeyword, 1, 1, 0, 0)) .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)) .apply(); } @Override public RetryStatementNode transform(RetryStatementNode retryStatementNode) { if (!isInLineRange(retryStatementNode)) { return retryStatementNode; } Token retryKeyword = getToken(retryStatementNode.retryKeyword()); TypeParameterNode typeParameter = this.modifyNode(retryStatementNode.typeParameter().orElse(null)); ParenthesizedArgList arguments = this.modifyNode(retryStatementNode.arguments().orElse(null)); StatementNode retryBody = this.modifyNode(retryStatementNode.retryBody()); if (typeParameter != null) { retryStatementNode = retryStatementNode.modify() .withTypeParameter(typeParameter).apply(); } if (arguments != null) { retryStatementNode = retryStatementNode.modify() .withArguments(arguments).apply(); } return retryStatementNode.modify() .withRetryKeyword(formatToken(retryKeyword, 1, 1, 0, 0)) 
.withRetryBody(retryBody) .apply(); } @Override public CommitActionNode transform(CommitActionNode commitActionNode) { if (!isInLineRange(commitActionNode)) { return commitActionNode; } Token commitKeyword = getToken(commitActionNode.commitKeyword()); return commitActionNode.modify() .withCommitKeyword(formatToken(commitKeyword, 1, 1, 0, 0)) .apply(); } @Override public TransactionalExpressionNode transform(TransactionalExpressionNode transactionalExpressionNode) { if (!isInLineRange(transactionalExpressionNode)) { return transactionalExpressionNode; } Token transactionalKeyword = getToken(transactionalExpressionNode.transactionalKeyword()); return transactionalExpressionNode.modify() .withTransactionalKeyword(formatToken(transactionalKeyword, 1, 1, 0, 0)) .apply(); } @Override public ServiceConstructorExpressionNode transform( ServiceConstructorExpressionNode serviceConstructorExpressionNode) { if (!isInLineRange(serviceConstructorExpressionNode)) { return serviceConstructorExpressionNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(serviceConstructorExpressionNode.annotations()); Token serviceKeyword = getToken(serviceConstructorExpressionNode.serviceKeyword()); Node serviceBody = this.modifyNode(serviceConstructorExpressionNode.serviceBody()); return serviceConstructorExpressionNode.modify() .withAnnotations(annotations) .withServiceKeyword(formatToken(serviceKeyword, 1, 1, 0, 0)) .withServiceBody(serviceBody) .apply(); } @Override public TypeReferenceTypeDescNode transform(TypeReferenceTypeDescNode typeReferenceTypeDescNode) { if (!isInLineRange(typeReferenceTypeDescNode)) { return typeReferenceTypeDescNode; } NameReferenceNode typeRef = this.modifyNode(typeReferenceTypeDescNode.typeRef()); return typeReferenceTypeDescNode.modify() .withTypeRef(typeRef) .apply(); } @Override public MatchStatementNode transform(MatchStatementNode matchStatementNode) { if (!isInLineRange(matchStatementNode)) { return matchStatementNode; } Token matchKeyword = 
getToken(matchStatementNode.matchKeyword()); ExpressionNode condition = this.modifyNode(matchStatementNode.condition()); Token openBrace = getToken(matchStatementNode.openBrace()); NodeList<MatchClauseNode> matchClauses = this.modifyNodeList(matchStatementNode.matchClauses()); Token closeBrace = getToken(matchStatementNode.closeBrace()); return matchStatementNode.modify() .withMatchKeyword(formatToken(matchKeyword, 1, 1, 0, 0)) .withCondition(condition) .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0)) .withMatchClauses(matchClauses) .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0)) .apply(); } @Override public MatchClauseNode transform(MatchClauseNode matchClauseNode) { if (!isInLineRange(matchClauseNode)) { return matchClauseNode; } SeparatedNodeList<Node> matchPatterns = this.modifySeparatedNodeList(matchClauseNode.matchPatterns()); MatchGuardNode matchGuard = this.modifyNode(matchClauseNode.matchGuard().orElse(null)); Token rightDoubleArrow = getToken(matchClauseNode.rightDoubleArrow()); BlockStatementNode blockStatement = this.modifyNode(matchClauseNode.blockStatement()); if (matchGuard != null) { matchClauseNode = matchClauseNode.modify() .withMatchGuard(matchGuard).apply(); } return matchClauseNode.modify() .withMatchPatterns(matchPatterns) .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public MatchGuardNode transform(MatchGuardNode matchGuardNode) { if (!isInLineRange(matchGuardNode)) { return matchGuardNode; } Token ifKeyword = getToken(matchGuardNode.ifKeyword()); ExpressionNode expression = this.modifyNode(matchGuardNode.expression()); return matchGuardNode.modify() .withIfKeyword(formatToken(ifKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public DistinctTypeDescriptorNode transform(DistinctTypeDescriptorNode distinctTypeDescriptorNode) { if (!isInLineRange(distinctTypeDescriptorNode)) { return distinctTypeDescriptorNode; } Token distinctKeyword = 
getToken(distinctTypeDescriptorNode.distinctKeyword()); TypeDescriptorNode typeDescriptor = this.modifyNode(distinctTypeDescriptorNode.typeDescriptor()); return distinctTypeDescriptorNode.modify() .withDistinctKeyword(formatToken(distinctKeyword, 1, 1, 0, 0)) .withTypeDescriptor(typeDescriptor) .apply(); } @Override public OnConflictClauseNode transform(OnConflictClauseNode onConflictClauseNode) { if (!isInLineRange(onConflictClauseNode)) { return onConflictClauseNode; } Token onKeyword = getToken(onConflictClauseNode.onKeyword()); Token conflictKeyword = getToken(onConflictClauseNode.conflictKeyword()); ExpressionNode expression = this.modifyNode(onConflictClauseNode.expression()); return onConflictClauseNode.modify() .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0)) .withConflictKeyword(formatToken(conflictKeyword, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public LimitClauseNode transform(LimitClauseNode limitClauseNode) { if (!isInLineRange(limitClauseNode)) { return limitClauseNode; } Token limitKeyword = getToken(limitClauseNode.limitKeyword()); ExpressionNode expression = this.modifyNode(limitClauseNode.expression()); return limitClauseNode.modify() .withLimitKeyword(formatToken(limitKeyword, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public JoinClauseNode transform(JoinClauseNode joinClauseNode) { if (!isInLineRange(joinClauseNode)) { return joinClauseNode; } Token outerKeyword = getToken(joinClauseNode.outerKeyword().orElse(null)); Token joinKeyword = getToken(joinClauseNode.joinKeyword()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(joinClauseNode.typedBindingPattern()); Token inKeyword = getToken(joinClauseNode.inKeyword()); ExpressionNode expression = this.modifyNode(joinClauseNode.expression()); OnClauseNode onCondition = this.modifyNode(joinClauseNode.onCondition()); if (outerKeyword != null) { joinClauseNode = joinClauseNode.modify() .withOuterKeyword(formatToken(outerKeyword, 1, 1, 0, 
0)).apply(); } return joinClauseNode.modify() .withJoinKeyword(formatToken(joinKeyword, 1, 1, 0, 0)) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0)) .withExpression(expression) .withOnCondition(onCondition) .apply(); } @Override public OnClauseNode transform(OnClauseNode onClauseNode) { if (!isInLineRange(onClauseNode)) { return onClauseNode; } Token onKeyword = getToken(onClauseNode.onKeyword()); Token equalsKeyword = getToken(onClauseNode.equalsKeyword()); ExpressionNode lhsExpr = this.modifyNode(onClauseNode.lhsExpression()); ExpressionNode rhsExpr = this.modifyNode(onClauseNode.rhsExpression()); return onClauseNode.modify() .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0)) .withLhsExpression(lhsExpr) .withEqualsKeyword(formatToken(equalsKeyword, 1, 1, 0, 0)) .withRhsExpression(rhsExpr) .apply(); } @Override public ListMatchPatternNode transform(ListMatchPatternNode listMatchPatternNode) { if (!isInLineRange(listMatchPatternNode)) { return listMatchPatternNode; } Token openBracket = getToken(listMatchPatternNode.openBracket()); SeparatedNodeList<Node> matchPatterns = this.modifySeparatedNodeList(listMatchPatternNode.matchPatterns()); RestMatchPatternNode restMatchPattern = this.modifyNode(listMatchPatternNode.restMatchPattern().orElse(null)); Token closeBracket = getToken(listMatchPatternNode.closeBracket()); return listMatchPatternNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withMatchPatterns(matchPatterns) .withRestMatchPattern(restMatchPattern) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public RestMatchPatternNode transform(RestMatchPatternNode restMatchPatternNode) { if (!isInLineRange(restMatchPatternNode)) { return restMatchPatternNode; } Token ellipsisToken = getToken(restMatchPatternNode.ellipsisToken()); Token varKeywordToken = getToken(restMatchPatternNode.varKeywordToken()); SimpleNameReferenceNode variableName = 
this.modifyNode(restMatchPatternNode.variableName()); return restMatchPatternNode.modify() .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0)) .withVarKeywordToken(formatToken(varKeywordToken, 1, 1, 0, 0)) .withVariableName(variableName) .apply(); } @Override public FieldMatchPatternNode transform(FieldMatchPatternNode fieldMatchPatternNode) { if (!isInLineRange(fieldMatchPatternNode)) { return fieldMatchPatternNode; } SimpleNameReferenceNode fieldNameNode = this.modifyNode(fieldMatchPatternNode.fieldNameNode()); Token colonToken = getToken(fieldMatchPatternNode.colonToken()); Node matchPattern = this.modifyNode(fieldMatchPatternNode.matchPattern()); return fieldMatchPatternNode.modify() .withFieldNameNode(fieldNameNode) .withColonToken(formatToken(colonToken, 1, 1, 0, 0)) .withMatchPattern(matchPattern) .apply(); } @Override public FunctionalMatchPatternNode transform(FunctionalMatchPatternNode functionalMatchPatternNode) { if (!isInLineRange(functionalMatchPatternNode)) { return functionalMatchPatternNode; } Node typeRef = this.modifyNode(functionalMatchPatternNode.typeRef()); Token openParenthesisToken = getToken(functionalMatchPatternNode.openParenthesisToken()); SeparatedNodeList<Node> argListMatchPatternNode = this.modifySeparatedNodeList(functionalMatchPatternNode.argListMatchPatternNode()); Token closeParenthesisToken = getToken(functionalMatchPatternNode.closeParenthesisToken()); return functionalMatchPatternNode.modify() .withTypeRef(typeRef) .withOpenParenthesisToken(formatToken(openParenthesisToken, 0, 0, 0, 0)) .withArgListMatchPatternNode(argListMatchPatternNode) .withCloseParenthesisToken(formatToken(closeParenthesisToken, 0, 0, 0, 0)) .apply(); } @Override public NamedArgMatchPatternNode transform(NamedArgMatchPatternNode namedArgMatchPatternNode) { if (!isInLineRange(namedArgMatchPatternNode)) { return namedArgMatchPatternNode; } IdentifierToken identifier = this.modifyNode(namedArgMatchPatternNode.identifier()); Token equalToken = 
getToken(namedArgMatchPatternNode.equalToken()); Node matchPattern = this.modifyNode(namedArgMatchPatternNode.matchPattern()); return namedArgMatchPatternNode.modify() .withIdentifier(identifier) .withEqualToken(formatToken(equalToken, 1, 1, 0, 0)) .withMatchPattern(matchPattern) .apply(); } @Override public MarkdownDocumentationNode transform(MarkdownDocumentationNode markdownDocumentationNode) { if (!isInLineRange(markdownDocumentationNode)) { return markdownDocumentationNode; } NodeList<Node> documentationLines = this.modifyNodeList(markdownDocumentationNode.documentationLines()); return markdownDocumentationNode.modify() .withDocumentationLines(documentationLines) .apply(); } @Override public MarkdownDocumentationLineNode transform(MarkdownDocumentationLineNode markdownDocumentationLineNode) { if (!isInLineRange(markdownDocumentationLineNode)) { return markdownDocumentationLineNode; } Token hashToken = getToken(markdownDocumentationLineNode.hashToken()); NodeList<Node> documentElements = this.modifyNodeList(markdownDocumentationLineNode.documentElements()); return markdownDocumentationLineNode.modify() .withDocumentElements(documentElements) .withHashToken(formatToken(hashToken, 1, 1, 0, 0)) .apply(); } @Override public MarkdownParameterDocumentationLineNode transform( MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) { if (!isInLineRange(markdownParameterDocumentationLineNode)) { return markdownParameterDocumentationLineNode; } Token hashToken = getToken(markdownParameterDocumentationLineNode.hashToken()); Token plusToken = getToken(markdownParameterDocumentationLineNode.plusToken()); Token parameterName = getToken(markdownParameterDocumentationLineNode.parameterName()); Token minusToken = getToken(markdownParameterDocumentationLineNode.minusToken()); NodeList<Node> documentElements = this.modifyNodeList(markdownParameterDocumentationLineNode.documentElements()); return markdownParameterDocumentationLineNode.modify() 
.withHashToken(formatToken(hashToken, 1, 1, 0, 0)) .withPlusToken(formatToken(plusToken, 1, 1, 0, 0)) .withParameterName(formatToken(parameterName, 1, 1, 0, 0)) .withMinusToken(formatToken(minusToken, 1, 1, 0, 0)) .withDocumentElements(documentElements) .apply(); } @Override public DocumentationReferenceNode transform(DocumentationReferenceNode documentationReferenceNode) { if (!isInLineRange(documentationReferenceNode)) { return documentationReferenceNode; } Token referenceType = getToken(documentationReferenceNode.referenceType().orElse(null)); Token startBacktick = getToken(documentationReferenceNode.startBacktick()); Node backtickContent = this.modifyNode(documentationReferenceNode.backtickContent()); Token endBacktick = getToken(documentationReferenceNode.endBacktick()); if (referenceType != null) { documentationReferenceNode = documentationReferenceNode.modify() .withReferenceType(referenceType).apply(); } return documentationReferenceNode.modify() .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0)) .withBacktickContent(backtickContent) .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0)) .apply(); } @Override public OrderByClauseNode transform(OrderByClauseNode orderByClauseNode) { if (!isInLineRange(orderByClauseNode)) { return orderByClauseNode; } Token orderKeyword = getToken(orderByClauseNode.orderKeyword()); Token byKeyword = getToken(orderByClauseNode.byKeyword()); SeparatedNodeList<OrderKeyNode> orderKey = this.modifySeparatedNodeList(orderByClauseNode.orderKey()); return orderByClauseNode.modify() .withOrderKeyword(formatToken(orderKeyword, 1, 1, 0, 0)) .withByKeyword(formatToken(byKeyword, 1, 1, 0, 0)) .withOrderKey(orderKey) .apply(); } @Override public OrderKeyNode transform(OrderKeyNode orderKeyNode) { if (!isInLineRange(orderKeyNode)) { return orderKeyNode; } ExpressionNode expression = this.modifyNode(orderKeyNode.expression()); Token orderDirection = getToken(orderKeyNode.orderDirection().orElse(null)); if (orderDirection != null) { 
orderKeyNode = orderKeyNode.modify()
                    .withOrderDirection(formatToken(orderDirection, 1, 1, 0, 0)).apply();
        }
        return orderKeyNode.modify()
                .withExpression(expression)
                .apply();
    }

    /**
     * Update the minutiae and return the token.
     * <p>
     * Appends the requested leading/trailing whitespace (newlines first, then spaces — see
     * {@link #getWhiteSpaces}) to whatever minutiae the token already carries.
     *
     * @param token token (may be null; returned as-is)
     * @param leadingSpaces leading spaces
     * @param trailingSpaces trailing spaces
     * @param leadingNewLines leading new lines
     * @param trailingNewLines trailing new lines
     * @return updated token, or null when {@code token} is null
     */
    private Token formatToken(Token token, int leadingSpaces, int trailingSpaces,
                              int leadingNewLines, int trailingNewLines) {
        if (token == null) {
            return token;
        }
        MinutiaeList leadingMinutiaeList = token.leadingMinutiae();
        MinutiaeList trailingMinutiaeList = token.trailingMinutiae();
        MinutiaeList newLeadingMinutiaeList = modifyMinutiaeList(leadingMinutiaeList, leadingSpaces, leadingNewLines);
        MinutiaeList newTrailingMinutiaeList =
                modifyMinutiaeList(trailingMinutiaeList, trailingSpaces, trailingNewLines);
        return token.modify(newLeadingMinutiaeList, newTrailingMinutiaeList);
    }

    // Appends a single whitespace minutiae node (newlines + spaces) to the given list.
    private MinutiaeList modifyMinutiaeList(MinutiaeList minutiaeList, int spaces, int newLines) {
        Minutiae minutiae = NodeFactory.createWhitespaceMinutiae(getWhiteSpaces(spaces, newLines));
        return minutiaeList.add(minutiae);
    }

    // Builds a whitespace string: `newLines` '\n' characters followed by `column` spaces.
    private String getWhiteSpaces(int column, int newLines) {
        StringBuilder whiteSpaces = new StringBuilder();
        for (int i = 0; i <= (newLines - 1); i++) {
            whiteSpaces.append("\n");
        }
        for (int i = 0; i <= (column - 1); i++) {
            whiteSpaces.append(" ");
        }
        return whiteSpaces.toString();
    }

    /**
     * Initialize the token with empty minutiae lists.
     * <p>
     * Strips all existing minutiae except comments (and their immediate neighbours, per
     * {@link #getCommentMinutiae}), so formatToken can attach fresh whitespace later.
     *
     * @param node node (may be null; returned as-is)
     * @return token with empty minutiae
     */
    private <T extends Token> Token getToken(T node) {
        if (node == null) {
            return node;
        }
        MinutiaeList leadingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList();
        MinutiaeList trailingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList();
        if (node.containsLeadingMinutiae()) {
            leadingMinutiaeList = getCommentMinutiae(node.leadingMinutiae(), true);
        }
        if (node.containsTrailingMinutiae()) {
            trailingMinutiaeList = getCommentMinutiae(node.trailingMinutiae(), false);
        }
        return node.modify(leadingMinutiaeList, trailingMinutiaeList);
    }

    // Keeps only comment minutiae, preserving the minutiae immediately before each comment and —
    // for leading lists — the one immediately after, so comments stay glued to their whitespace.
    private MinutiaeList getCommentMinutiae(MinutiaeList minutiaeList, boolean isLeading) {
        MinutiaeList minutiaes = AbstractNodeFactory.createEmptyMinutiaeList();
        for (int i = 0; i < minutiaeList.size(); i++) {
            if (minutiaeList.get(i).kind().equals(SyntaxKind.COMMENT_MINUTIAE)) {
                if (i > 0) {
                    minutiaes = minutiaes.add(minutiaeList.get(i - 1));
                }
                minutiaes = minutiaes.add(minutiaeList.get(i));
                if ((i + 1) < minutiaeList.size() && isLeading) {
                    minutiaes = minutiaes.add(minutiaeList.get(i + 1));
                }
            }
        }
        return minutiaes;
    }

    // Walks up the tree to find the ancestor used as the indentation anchor for `node`.
    // Returns null when the node should not be indented relative to any ancestor. The
    // kind-specific cases below are ordered heuristics; their order is significant.
    private <T extends Node> Node getParent(T node, SyntaxKind syntaxKind) {
        Node parent = node.parent();
        if (parent == null) {
            parent = node;
        }
        SyntaxKind parentKind = parent.kind();
        if (parentKind == SyntaxKind.MODULE_VAR_DECL) {
            // Top-level qualified name refs in module variable declarations get no anchor.
            if (parent.parent() != null && parent.parent().kind() == SyntaxKind.MODULE_PART &&
                    syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                return null;
            }
            return parent;
        } else if (parentKind == SyntaxKind.FUNCTION_DEFINITION ||
                parentKind == SyntaxKind.IF_ELSE_STATEMENT ||
                parentKind == SyntaxKind.ELSE_BLOCK ||
                parentKind == SyntaxKind.SPECIFIC_FIELD ||
                parentKind == SyntaxKind.WHILE_STATEMENT) {
            return parent;
        } else if (syntaxKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            if (parentKind == SyntaxKind.REQUIRED_PARAM ||
                    parentKind == SyntaxKind.POSITIONAL_ARG ||
                    parentKind == SyntaxKind.BINARY_EXPRESSION ||
                    parentKind == SyntaxKind.RETURN_STATEMENT ||
                    parentKind == SyntaxKind.LOCAL_VAR_DECL ||
                    (parentKind == SyntaxKind.FUNCTION_CALL && parent.parent() != null &&
                            parent.parent().kind() == SyntaxKind.ASSIGNMENT_STATEMENT)) {
                return null;
            }
            return getParent(parent, syntaxKind);
        } else if (parentKind == SyntaxKind.SERVICE_DECLARATION ||
                parentKind == SyntaxKind.BINARY_EXPRESSION) {
            if (syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                return null;
            }
            return parent;
        } else if (parentKind == SyntaxKind.REQUIRED_PARAM ||
                parentKind == SyntaxKind.RETURN_TYPE_DESCRIPTOR) {
            return null;
        } else if (parent.parent() != null) {
            return getParent(parent, syntaxKind);
        } else {
            return null;
        }
    }

    /**
     * Get the node position.
     * <p>
     * Lines are converted from the syntax tree's 0-based numbering to 1-based; the source is
     * left null intentionally — callers only read line/column fields.
     *
     * @param node node (may be null; returns null)
     * @return node position
     */
    private DiagnosticPos getPosition(Node node) {
        if (node == null) {
            return null;
        }
        LineRange range = node.lineRange();
        LinePosition startPos = range.startLine();
        LinePosition endPos = range.endLine();
        return new DiagnosticPos(null, startPos.line() + 1, endPos.line() + 1,
                startPos.offset(), endPos.offset());
    }

    /**
     * return the indented start column.
     *
     * @param node node
     * @param syntaxKind node kind
     * @param addSpaces add spaces or not (adds one 4-space indent level)
     * @return start position (0 when no indentation anchor exists)
     */
    private int getStartColumn(Node node, SyntaxKind syntaxKind, boolean addSpaces) {
        Node parent = getParent(node, syntaxKind);
        if (parent != null) {
            return getPosition(parent).sCol + (addSpaces ? 4 : 0);
        }
        return 0;
    }

    // True when `node` falls inside the configured formatting range (null range = whole file).
    // NOTE(review): when only one of the node's start/end lines coincides with a range boundary,
    // BOTH offsets are checked against the boundary offsets — confirm that is intended for nodes
    // that start on the boundary line but end strictly inside the range.
    private boolean isInLineRange(Node node) {
        if (this.lineRange == null) {
            return true;
        }
        int nodeStartLine = node.lineRange().startLine().line();
        int nodeStartOffset = node.lineRange().startLine().offset();
        int nodeEndLine = node.lineRange().endLine().line();
        int nodeEndOffset = node.lineRange().endLine().offset();
        int startLine = this.lineRange.startLine().line();
        int startOffset = this.lineRange.startLine().offset();
        int endLine = this.lineRange.endLine().line();
        int endOffset = this.lineRange.endLine().offset();
        if (nodeStartLine >= startLine && nodeEndLine <= endLine) {
            if (nodeStartLine == startLine || nodeEndLine == endLine) {
                return nodeStartOffset >= startOffset && nodeEndOffset <= endOffset;
            }
            return true;
        }
        return false;
    }

    // Formatting options currently in effect for this modifier.
    public FormattingOptions getFormattingOptions() {
        return formattingOptions;
    }

    void setFormattingOptions(FormattingOptions formattingOptions) {
        this.formattingOptions = formattingOptions;
    }

    // Restricts formatting to the given range; null means format everything.
    void setLineRange(LineRange lineRange) {
        this.lineRange = lineRange;
    }
}
/**
 * Tree modifier that rewrites each syntax-tree node's tokens with normalized
 * leading/trailing spaces and newlines (via {@code formatToken}), honoring an
 * optional line range so only part of a document is reformatted.
 */
class FormattingTreeModifier extends TreeModifier {
    private FormattingOptions formattingOptions;
    private LineRange lineRange;

    @Override
    public ImportDeclarationNode transform(ImportDeclarationNode importDeclarationNode) {
        // Nodes outside the requested range are returned untouched.
        if (!isInLineRange(importDeclarationNode)) {
            return importDeclarationNode;
        }
        Token importKeyword = getToken(importDeclarationNode.importKeyword());
        Token semicolon = getToken(importDeclarationNode.semicolon());
        SeparatedNodeList<IdentifierToken> moduleNames = this.modifySeparatedNodeList(
                importDeclarationNode.moduleName());
        // Org name, prefix and version are optional; modify only when present.
        ImportOrgNameNode orgName = this.modifyNode(importDeclarationNode.orgName().orElse(null));
        ImportPrefixNode prefix = this.modifyNode(importDeclarationNode.prefix().orElse(null));
        ImportVersionNode version = this.modifyNode(importDeclarationNode.version().orElse(null));
        if (orgName != null) {
            importDeclarationNode = importDeclarationNode.modify()
                    .withOrgName(orgName).apply();
        }
        if (prefix != null) {
            importDeclarationNode = importDeclarationNode.modify()
                    .withPrefix(prefix).apply();
        }
        if (version != null) {
            importDeclarationNode = importDeclarationNode.modify()
                    .withVersion(version).apply();
        }
        return importDeclarationNode.modify()
                .withImportKeyword(formatToken(importKeyword, 0, 0, 0, 0))
                .withModuleName(moduleNames)
                // One trailing newline after the import statement.
                .withSemicolon(formatToken(semicolon, 0, 0, 0, 1))
                .apply();
    }

    @Override
    public ImportOrgNameNode transform(ImportOrgNameNode importOrgNameNode) {
        if (!isInLineRange(importOrgNameNode)) {
            return importOrgNameNode;
        }
        Token orgName = getToken(importOrgNameNode.orgName());
        Token slashToken = getToken(importOrgNameNode.slashToken());
        return importOrgNameNode.modify()
                .withOrgName(formatToken(orgName, 1, 0, 0, 0))
                .withSlashToken(formatToken(slashToken, 0, 0, 0, 0))
                .apply();
    }

    @Override
    public ImportPrefixNode transform(ImportPrefixNode importPrefixNode) {
        if (!isInLineRange(importPrefixNode)) {
            return importPrefixNode;
        }
        Token asKeyword = getToken(importPrefixNode.asKeyword());
        Token prefix = getToken(importPrefixNode.prefix());
        return importPrefixNode.modify()
                .withAsKeyword(formatToken(asKeyword, 1, 0, 0, 0))
                .withPrefix(formatToken(prefix, 1, 0, 0, 0))
                .apply();
    }

    @Override
    public ImportVersionNode transform(ImportVersionNode importVersionNode) {
        if (!isInLineRange(importVersionNode)) {
            return importVersionNode;
        }
        Token versionKeyword = getToken(importVersionNode.versionKeyword());
        SeparatedNodeList<Token> versionNumber =
                this.modifySeparatedNodeList(importVersionNode.versionNumber());
        return importVersionNode.modify()
                .withVersionKeyword(formatToken(versionKeyword, 1, 1, 0, 0))
                .withVersionNumber(versionNumber)
                .apply();
    }

    @Override
    public IdentifierToken transform(IdentifierToken identifier) {
        if (!isInLineRange(identifier)) {
            return identifier;
        }
        Token identifierToken = getToken(identifier);
        return (IdentifierToken) formatToken(identifierToken, 0, 0, 0, 0);
    }

    @Override
    public FunctionDefinitionNode transform(FunctionDefinitionNode functionDefinitionNode) {
        if (!isInLineRange(functionDefinitionNode)) {
            return functionDefinitionNode;
        }
        // Metadata (annotations/doc) is optional; apply only when present.
        MetadataNode metadata = this.modifyNode(functionDefinitionNode.metadata().orElse(null));
        NodeList<Token> qualifierList = this.modifyNodeList(functionDefinitionNode.qualifierList());
        Token functionKeyword = getToken(functionDefinitionNode.functionKeyword());
        Token functionName = getToken(functionDefinitionNode.functionName());
        FunctionSignatureNode functionSignatureNode =
                this.modifyNode(functionDefinitionNode.functionSignature());
        FunctionBodyNode functionBodyNode = this.modifyNode(functionDefinitionNode.functionBody());
        if (metadata != null) {
            functionDefinitionNode = functionDefinitionNode.modify()
                    .withMetadata(metadata).apply();
        }
        return functionDefinitionNode.modify()
                .withFunctionKeyword(formatToken(functionKeyword, 0, 0, 0, 0))
                // Single space between the `function` keyword and the name.
                .withFunctionName((IdentifierToken) formatToken(functionName, 1, 0, 0, 0))
                .withFunctionSignature(functionSignatureNode)
                .withQualifierList(qualifierList)
                .withFunctionBody(functionBodyNode)
                .apply();
    }

    @Override
    public FunctionSignatureNode transform(FunctionSignatureNode functionSignatureNode) {
        if (!isInLineRange(functionSignatureNode)) {
            return functionSignatureNode;
        }
        Token openPara = getToken(functionSignatureNode.openParenToken());
        Token closePara = getToken(functionSignatureNode.closeParenToken());
        SeparatedNodeList<ParameterNode> parameters =
                this.modifySeparatedNodeList(functionSignatureNode.parameters());
        // Return type descriptor is optional.
        ReturnTypeDescriptorNode returnTypeDesc =
                this.modifyNode(functionSignatureNode.returnTypeDesc().orElse(null));
        if (returnTypeDesc != null) {
            functionSignatureNode = functionSignatureNode.modify()
                    .withReturnTypeDesc(returnTypeDesc).apply();
        }
        return functionSignatureNode.modify()
                .withOpenParenToken(formatToken(openPara, 0, 0, 0, 0))
                .withCloseParenToken(formatToken(closePara, 0, 0, 0, 0))
                .withParameters(parameters)
                .apply();
    }

    @Override
    public ReturnTypeDescriptorNode transform(ReturnTypeDescriptorNode returnTypeDescriptorNode) {
        if (!isInLineRange(returnTypeDescriptorNode)) {
            return returnTypeDescriptorNode;
        }
        Token returnsKeyword = getToken(returnTypeDescriptorNode.returnsKeyword());
        NodeList<AnnotationNode> annotations =
                this.modifyNodeList(returnTypeDescriptorNode.annotations());
        Node type = this.modifyNode(returnTypeDescriptorNode.type());
        return returnTypeDescriptorNode.modify()
                .withAnnotations(annotations)
                // `returns` gets one space on each side.
                .withReturnsKeyword(formatToken(returnsKeyword, 1, 1, 0, 0))
                .withType(type)
                .apply();
    }

    @Override
    public OptionalTypeDescriptorNode transform(OptionalTypeDescriptorNode optionalTypeDescriptorNode) {
        if (!isInLineRange(optionalTypeDescriptorNode)) {
            return optionalTypeDescriptorNode;
        }
        Node typeDescriptor = this.modifyNode(optionalTypeDescriptorNode.typeDescriptor());
        Token questionMarkToken = getToken(optionalTypeDescriptorNode.questionMarkToken());
        return optionalTypeDescriptorNode.modify()
                .withTypeDescriptor(typeDescriptor)
                // (statement continues on the next original line)
                .withQuestionMarkToken(formatToken(questionMarkToken, 0, 0, 0,
0)) .apply(); } @Override public RequiredParameterNode transform(RequiredParameterNode requiredParameterNode) { if (!isInLineRange(requiredParameterNode)) { return requiredParameterNode; } Token paramName = getToken(requiredParameterNode.paramName().orElse(null)); NodeList<AnnotationNode> annotations = this.modifyNodeList(requiredParameterNode.annotations()); Node typeName = this.modifyNode(requiredParameterNode.typeName()); if (paramName != null) { requiredParameterNode = requiredParameterNode.modify() .withParamName(formatToken(paramName, 1, 0, 0, 0)).apply(); } return requiredParameterNode.modify() .withAnnotations(annotations) .withTypeName(typeName) .apply(); } @Override public BuiltinSimpleNameReferenceNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) { if (!isInLineRange(builtinSimpleNameReferenceNode)) { return builtinSimpleNameReferenceNode; } int startCol = getStartColumn(builtinSimpleNameReferenceNode, builtinSimpleNameReferenceNode.kind(), true); Token name = getToken(builtinSimpleNameReferenceNode.name()); return builtinSimpleNameReferenceNode.modify() .withName(formatToken(name, startCol, 0, 0, 0)) .apply(); } @Override public FunctionBodyBlockNode transform(FunctionBodyBlockNode functionBodyBlockNode) { if (!isInLineRange(functionBodyBlockNode)) { return functionBodyBlockNode; } int startColumn = getStartColumn(functionBodyBlockNode, functionBodyBlockNode.kind(), false); Token functionBodyOpenBrace = getToken(functionBodyBlockNode.openBraceToken()); Token functionBodyCloseBrace = getToken(functionBodyBlockNode.closeBraceToken()); NodeList<StatementNode> statements = this.modifyNodeList(functionBodyBlockNode.statements()); NamedWorkerDeclarator namedWorkerDeclarator = this.modifyNode(functionBodyBlockNode.namedWorkerDeclarator().orElse(null)); if (namedWorkerDeclarator != null) { functionBodyBlockNode = functionBodyBlockNode.modify() .withNamedWorkerDeclarator(namedWorkerDeclarator).apply(); } return 
functionBodyBlockNode.modify() .withOpenBraceToken(formatToken(functionBodyOpenBrace, 1, 0, 0, 1)) .withCloseBraceToken(formatToken(functionBodyCloseBrace, startColumn, 0, 0, 1)) .withStatements(statements) .apply(); } @Override public ExpressionStatementNode transform(ExpressionStatementNode expressionStatementNode) { if (!isInLineRange(expressionStatementNode)) { return expressionStatementNode; } ExpressionNode expression = this.modifyNode(expressionStatementNode.expression()); Token semicolonToken = expressionStatementNode.semicolonToken(); return expressionStatementNode.modify() .withExpression(expression) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public FunctionCallExpressionNode transform(FunctionCallExpressionNode functionCallExpressionNode) { if (!isInLineRange(functionCallExpressionNode)) { return functionCallExpressionNode; } NameReferenceNode functionName = this.modifyNode(functionCallExpressionNode.functionName()); Token functionCallOpenPara = getToken(functionCallExpressionNode.openParenToken()); Token functionCallClosePara = getToken(functionCallExpressionNode.closeParenToken()); SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(functionCallExpressionNode .arguments()); return functionCallExpressionNode.modify() .withFunctionName(functionName) .withOpenParenToken(formatToken(functionCallOpenPara, 0, 0, 0, 0)) .withCloseParenToken(formatToken(functionCallClosePara, 0, 0, 0, 0)) .withArguments(arguments) .apply(); } @Override public QualifiedNameReferenceNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) { if (!isInLineRange(qualifiedNameReferenceNode)) { return qualifiedNameReferenceNode; } int startCol = getStartColumn(qualifiedNameReferenceNode, qualifiedNameReferenceNode.kind(), false); Token modulePrefix = getToken(qualifiedNameReferenceNode.modulePrefix()); Token identifier = getToken(qualifiedNameReferenceNode.identifier()); Token colon = getToken((Token) 
qualifiedNameReferenceNode.colon()); return qualifiedNameReferenceNode.modify() .withModulePrefix(formatToken(modulePrefix, startCol, 0, 0, 0)) .withIdentifier((IdentifierToken) formatToken(identifier, 0, 0, 0, 0)) .withColon(formatToken(colon, 0, 0, 0, 0)) .apply(); } @Override public PositionalArgumentNode transform(PositionalArgumentNode positionalArgumentNode) { if (!isInLineRange(positionalArgumentNode)) { return positionalArgumentNode; } ExpressionNode expression = this.modifyNode(positionalArgumentNode.expression()); return positionalArgumentNode.modify() .withExpression(expression) .apply(); } @Override public BasicLiteralNode transform(BasicLiteralNode basicLiteralNode) { if (!isInLineRange(basicLiteralNode)) { return basicLiteralNode; } Token literalToken = getToken(basicLiteralNode.literalToken()); return basicLiteralNode.modify() .withLiteralToken(formatToken(literalToken, 0, 0, 0, 0)) .apply(); } @Override public ServiceDeclarationNode transform(ServiceDeclarationNode serviceDeclarationNode) { if (!isInLineRange(serviceDeclarationNode)) { return serviceDeclarationNode; } Token serviceKeyword = getToken(serviceDeclarationNode.serviceKeyword()); IdentifierToken serviceName = (IdentifierToken) getToken(serviceDeclarationNode.serviceName()); Token onKeyword = getToken(serviceDeclarationNode.onKeyword()); MetadataNode metadata = this.modifyNode(serviceDeclarationNode.metadata().orElse(null)); SeparatedNodeList<ExpressionNode> expressions = this.modifySeparatedNodeList(serviceDeclarationNode.expressions()); Node serviceBody = this.modifyNode(serviceDeclarationNode.serviceBody()); if (metadata != null) { serviceDeclarationNode = serviceDeclarationNode.modify() .withMetadata(metadata).apply(); } return serviceDeclarationNode.modify() .withServiceKeyword(formatToken(serviceKeyword, 0, 0, 1, 0)) .withServiceName((IdentifierToken) formatToken(serviceName, 1, 0, 0, 0)) .withOnKeyword(formatToken(onKeyword, 1, 0, 0, 0)) .withExpressions(expressions) 
.withServiceBody(serviceBody) .apply(); } @Override public ServiceBodyNode transform(ServiceBodyNode serviceBodyNode) { if (!isInLineRange(serviceBodyNode)) { return serviceBodyNode; } Token openBraceToken = getToken(serviceBodyNode.openBraceToken()); Token closeBraceToken = getToken(serviceBodyNode.closeBraceToken()); NodeList<Node> resources = this.modifyNodeList(serviceBodyNode.resources()); return serviceBodyNode.modify() .withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1)) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 1)) .withResources(resources) .apply(); } @Override public ExplicitNewExpressionNode transform(ExplicitNewExpressionNode explicitNewExpressionNode) { if (!isInLineRange(explicitNewExpressionNode)) { return explicitNewExpressionNode; } Token newKeywordToken = getToken(explicitNewExpressionNode.newKeyword()); TypeDescriptorNode typeDescriptorNode = this.modifyNode(explicitNewExpressionNode.typeDescriptor()); return explicitNewExpressionNode.modify() .withNewKeyword(formatToken(newKeywordToken, 1, 1, 0, 0)) .withParenthesizedArgList(modifyNode(explicitNewExpressionNode.parenthesizedArgList())) .withTypeDescriptor(typeDescriptorNode) .apply(); } @Override public ParenthesizedArgList transform(ParenthesizedArgList parenthesizedArgList) { if (!isInLineRange(parenthesizedArgList)) { return parenthesizedArgList; } Token openParenToken = getToken(parenthesizedArgList.openParenToken()); Token closeParenToken = getToken(parenthesizedArgList.closeParenToken()); SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(parenthesizedArgList .arguments()); return parenthesizedArgList.modify() .withArguments(arguments) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public VariableDeclarationNode transform(VariableDeclarationNode variableDeclarationNode) { if (!isInLineRange(variableDeclarationNode)) { return 
variableDeclarationNode; } Token semicolonToken = getToken(variableDeclarationNode.semicolonToken()); Token equalToken = getToken(variableDeclarationNode.equalsToken().orElse(null)); Token finalToken = getToken(variableDeclarationNode.finalKeyword().orElse(null)); ExpressionNode initializer = this.modifyNode(variableDeclarationNode.initializer().orElse(null)); NodeList<AnnotationNode> annotationNodes = this.modifyNodeList(variableDeclarationNode.annotations()); TypedBindingPatternNode typedBindingPatternNode = this.modifyNode( variableDeclarationNode.typedBindingPattern()); if (equalToken != null) { variableDeclarationNode = variableDeclarationNode.modify() .withEqualsToken(formatToken(equalToken, 1, 1, 0, 0)).apply(); } if (finalToken != null) { variableDeclarationNode = variableDeclarationNode.modify() .withFinalKeyword(formatToken(finalToken, 0, 0, 0, 0)).apply(); } if (initializer != null) { variableDeclarationNode = variableDeclarationNode.modify() .withInitializer(initializer).apply(); } return variableDeclarationNode.modify() .withAnnotations(annotationNodes) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .withTypedBindingPattern(typedBindingPatternNode) .apply(); } @Override public TypedBindingPatternNode transform(TypedBindingPatternNode typedBindingPatternNode) { if (!isInLineRange(typedBindingPatternNode)) { return typedBindingPatternNode; } BindingPatternNode bindingPatternNode = this.modifyNode(typedBindingPatternNode.bindingPattern()); TypeDescriptorNode typeDescriptorNode = this.modifyNode(typedBindingPatternNode.typeDescriptor()); return typedBindingPatternNode.modify() .withBindingPattern(bindingPatternNode) .withTypeDescriptor(typeDescriptorNode) .apply(); } @Override public CaptureBindingPatternNode transform(CaptureBindingPatternNode captureBindingPatternNode) { if (!isInLineRange(captureBindingPatternNode)) { return captureBindingPatternNode; } Token variableName = getToken(captureBindingPatternNode.variableName()); return 
captureBindingPatternNode.modify() .withVariableName(formatToken(variableName, 1, 0, 0, 0)) .apply(); } @Override public ListBindingPatternNode transform(ListBindingPatternNode listBindingPatternNode) { if (!isInLineRange(listBindingPatternNode)) { return listBindingPatternNode; } SeparatedNodeList<BindingPatternNode> bindingPatternNodes = this.modifySeparatedNodeList( listBindingPatternNode.bindingPatterns()); Token openBracket = getToken(listBindingPatternNode.openBracket()); Token closeBracket = getToken(listBindingPatternNode.closeBracket()); RestBindingPatternNode restBindingPattern = this.modifyNode(listBindingPatternNode.restBindingPattern().orElse(null)); if (restBindingPattern != null) { listBindingPatternNode = listBindingPatternNode.modify() .withRestBindingPattern(restBindingPattern).apply(); } return listBindingPatternNode.modify() .withBindingPatterns(bindingPatternNodes) .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public MappingBindingPatternNode transform(MappingBindingPatternNode mappingBindingPatternNode) { if (!isInLineRange(mappingBindingPatternNode)) { return mappingBindingPatternNode; } Token openBraceToken = getToken(mappingBindingPatternNode.openBrace()); Token closeBraceToken = getToken(mappingBindingPatternNode.closeBrace()); SeparatedNodeList<FieldBindingPatternNode> fieldBindingPatternNodes = this.modifySeparatedNodeList(mappingBindingPatternNode.fieldBindingPatterns()); RestBindingPatternNode restBindingPattern = this.modifyNode(mappingBindingPatternNode.restBindingPattern().orElse(null)); if (restBindingPattern != null) { mappingBindingPatternNode = mappingBindingPatternNode.modify() .withRestBindingPattern(restBindingPattern).apply(); } return mappingBindingPatternNode.modify() .withOpenBrace(formatToken(openBraceToken, 1, 0, 0, 1)) .withCloseBrace(formatToken(closeBraceToken, 0, 0, 1, 0)) .withFieldBindingPatterns(fieldBindingPatternNodes) 
.apply(); } @Override public FieldBindingPatternFullNode transform(FieldBindingPatternFullNode fieldBindingPatternFullNode) { if (!isInLineRange(fieldBindingPatternFullNode)) { return fieldBindingPatternFullNode; } Token colon = getToken(fieldBindingPatternFullNode.colon()); BindingPatternNode bindingPatternNode = this.modifyNode(fieldBindingPatternFullNode.bindingPattern()); SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternFullNode.variableName()); return fieldBindingPatternFullNode.modify() .withBindingPattern(bindingPatternNode) .withColon(formatToken(colon, 0, 0, 0, 0)) .withVariableName(variableName) .apply(); } @Override public FieldBindingPatternVarnameNode transform(FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode) { if (!isInLineRange(fieldBindingPatternVarnameNode)) { return fieldBindingPatternVarnameNode; } SimpleNameReferenceNode variableName = this.modifyNode(fieldBindingPatternVarnameNode.variableName()); return fieldBindingPatternVarnameNode.modify() .withVariableName(variableName) .apply(); } @Override public RestBindingPatternNode transform(RestBindingPatternNode restBindingPatternNode) { if (!isInLineRange(restBindingPatternNode)) { return restBindingPatternNode; } Token ellipsisToken = getToken(restBindingPatternNode.ellipsisToken()); SimpleNameReferenceNode variableName = restBindingPatternNode.variableName(); return restBindingPatternNode.modify() .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0)) .withVariableName(variableName) .apply(); } @Override public RemoteMethodCallActionNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) { if (!isInLineRange(remoteMethodCallActionNode)) { return remoteMethodCallActionNode; } Token openParenToken = getToken(remoteMethodCallActionNode.openParenToken()); Token closeParenToken = getToken(remoteMethodCallActionNode.closeParenToken()); Token rightArrowToken = getToken(remoteMethodCallActionNode.rightArrowToken()); 
SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(remoteMethodCallActionNode .arguments()); ExpressionNode expression = this.modifyNode(remoteMethodCallActionNode.expression()); SimpleNameReferenceNode methodName = this.modifyNode(remoteMethodCallActionNode.methodName()); return remoteMethodCallActionNode.modify() .withArguments(arguments) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .withExpression(expression) .withMethodName(methodName) .withRightArrowToken(formatToken(rightArrowToken, 0, 0, 0, 0)) .apply(); } @Override public SimpleNameReferenceNode transform(SimpleNameReferenceNode simpleNameReferenceNode) { if (!isInLineRange(simpleNameReferenceNode)) { return simpleNameReferenceNode; } Token name = getToken(simpleNameReferenceNode.name()); return simpleNameReferenceNode.modify() .withName(formatToken(name, 0, 0, 0, 0)) .apply(); } @Override public IfElseStatementNode transform(IfElseStatementNode ifElseStatementNode) { if (!isInLineRange(ifElseStatementNode)) { return ifElseStatementNode; } BlockStatementNode ifBody = this.modifyNode(ifElseStatementNode.ifBody()); ExpressionNode condition = this.modifyNode(ifElseStatementNode.condition()); Token ifKeyword = getToken(ifElseStatementNode.ifKeyword()); Node elseBody = this.modifyNode(ifElseStatementNode.elseBody().orElse(null)); int startColumn = 1; if (ifElseStatementNode.parent().kind() != SyntaxKind.ELSE_BLOCK) { startColumn = getStartColumn(ifElseStatementNode, ifElseStatementNode.kind(), true); } if (elseBody != null) { ifElseStatementNode = ifElseStatementNode.modify() .withElseBody(elseBody).apply(); } return ifElseStatementNode.modify() .withIfKeyword(formatToken(ifKeyword, startColumn, 0, 0, 0)) .withIfBody(ifBody) .withCondition(condition) .apply(); } @Override public ElseBlockNode transform(ElseBlockNode elseBlockNode) { if (!isInLineRange(elseBlockNode)) { return elseBlockNode; } Token 
elseKeyword = getToken(elseBlockNode.elseKeyword()); StatementNode elseBody = this.modifyNode(elseBlockNode.elseBody()); return elseBlockNode.modify() .withElseKeyword(formatToken(elseKeyword, 1, 0, 0, 0)) .withElseBody(elseBody) .apply(); } @Override public BracedExpressionNode transform(BracedExpressionNode bracedExpressionNode) { if (!isInLineRange(bracedExpressionNode)) { return bracedExpressionNode; } Token openParen = getToken(bracedExpressionNode.openParen()); Token closeParen = getToken(bracedExpressionNode.closeParen()); ExpressionNode expression = this.modifyNode(bracedExpressionNode.expression()); return bracedExpressionNode.modify() .withOpenParen(formatToken(openParen, 1, 0, 0, 0)) .withCloseParen(formatToken(closeParen, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public TypeTestExpressionNode transform(TypeTestExpressionNode typeTestExpressionNode) { if (!isInLineRange(typeTestExpressionNode)) { return typeTestExpressionNode; } ExpressionNode expression = this.modifyNode(typeTestExpressionNode.expression()); Node typeDescriptor = this.modifyNode(typeTestExpressionNode.typeDescriptor()); Token isToken = getToken(typeTestExpressionNode.isKeyword()); return typeTestExpressionNode.modify() .withExpression(expression) .withIsKeyword(formatToken(isToken, 1, 1, 0, 0)) .withTypeDescriptor(typeDescriptor) .apply(); } @Override public ErrorTypeDescriptorNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) { if (!isInLineRange(errorTypeDescriptorNode)) { return errorTypeDescriptorNode; } Token errorKeywordToken = getToken(errorTypeDescriptorNode.errorKeywordToken()); ErrorTypeParamsNode errorTypeParamsNode = this.modifyNode(errorTypeDescriptorNode.errorTypeParamsNode().orElse(null)); if (errorTypeParamsNode != null) { errorTypeDescriptorNode = errorTypeDescriptorNode.modify() .withErrorTypeParamsNode(errorTypeParamsNode).apply(); } return errorTypeDescriptorNode.modify() .withErrorKeywordToken(formatToken(errorKeywordToken, 0, 0, 0, 
0)) .apply(); } @Override public ModuleVariableDeclarationNode transform(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { if (!isInLineRange(moduleVariableDeclarationNode)) { return moduleVariableDeclarationNode; } Token equalsToken = getToken(moduleVariableDeclarationNode.equalsToken()); Token semicolonToken = getToken(moduleVariableDeclarationNode.semicolonToken()); Token finalKeyword = getToken(moduleVariableDeclarationNode.finalKeyword().orElse(null)); MetadataNode metadata = this.modifyNode(moduleVariableDeclarationNode.metadata().orElse(null)); ExpressionNode initializer = this.modifyNode(moduleVariableDeclarationNode.initializer()); if (metadata != null) { moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify() .withMetadata(metadata).apply(); } if (finalKeyword != null) { moduleVariableDeclarationNode = moduleVariableDeclarationNode.modify() .withFinalKeyword(formatToken(finalKeyword, 0, 1, 0, 0)).apply(); } return moduleVariableDeclarationNode.modify() .withTypedBindingPattern(this.modifyNode(moduleVariableDeclarationNode.typedBindingPattern())) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withInitializer(initializer) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 2)) .apply(); } @Override public ConstantDeclarationNode transform(ConstantDeclarationNode constantDeclarationNode) { if (!isInLineRange(constantDeclarationNode)) { return constantDeclarationNode; } Token constKeyword = getToken(constantDeclarationNode.constKeyword()); Token variableName = getToken(constantDeclarationNode.variableName()); Token equalsToken = getToken(constantDeclarationNode.equalsToken()); Token semicolonToken = getToken(constantDeclarationNode.semicolonToken()); Token visibilityQualifier = getToken(constantDeclarationNode.visibilityQualifier().orElse(null)); Node initializer = this.modifyNode(constantDeclarationNode.initializer()); MetadataNode metadata = this.modifyNode(constantDeclarationNode.metadata().orElse(null)); 
TypeDescriptorNode typeDescriptorNode = this.modifyNode(constantDeclarationNode.typeDescriptor().orElse(null)); if (metadata != null) { constantDeclarationNode = constantDeclarationNode.modify() .withMetadata(metadata).apply(); } return constantDeclarationNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0)) .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0)) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withInitializer(initializer) .withSemicolonToken(formatToken(semicolonToken, 1, 1, 0, 1)) .withTypeDescriptor(typeDescriptorNode) .withVariableName(variableName) .apply(); } @Override public MetadataNode transform(MetadataNode metadataNode) { if (!isInLineRange(metadataNode)) { return metadataNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(metadataNode.annotations()); Node documentationString = metadataNode.documentationString().orElse(null); if (documentationString != null) { metadataNode = metadataNode.modify() .withDocumentationString(this.modifyNode(documentationString)).apply(); } return metadataNode.modify() .withAnnotations(annotations) .apply(); } @Override public BlockStatementNode transform(BlockStatementNode blockStatementNode) { if (!isInLineRange(blockStatementNode)) { return blockStatementNode; } int startColumn = getStartColumn(blockStatementNode, blockStatementNode.kind(), false); Token openBraceToken = getToken(blockStatementNode.openBraceToken()); Token closeBraceToken = getToken(blockStatementNode.closeBraceToken()); NodeList<StatementNode> statements = this.modifyNodeList(blockStatementNode.statements()); int trailingNewLines = 1; if (blockStatementNode.parent() != null && blockStatementNode.parent().kind() == SyntaxKind.IF_ELSE_STATEMENT) { IfElseStatementNode ifElseStatementNode = (IfElseStatementNode) blockStatementNode.parent(); if (ifElseStatementNode.elseBody().isPresent()) { trailingNewLines = 0; } } return blockStatementNode.modify() 
.withOpenBraceToken(formatToken(openBraceToken, 1, 0, 0, 1)) .withCloseBraceToken(formatToken(closeBraceToken, startColumn, 0, 0, trailingNewLines)) .withStatements(statements) .apply(); } @Override public MappingConstructorExpressionNode transform( MappingConstructorExpressionNode mappingConstructorExpressionNode) { if (!isInLineRange(mappingConstructorExpressionNode)) { return mappingConstructorExpressionNode; } int startColumn = getStartColumn(mappingConstructorExpressionNode, mappingConstructorExpressionNode.kind(), false); Token openBrace = getToken(mappingConstructorExpressionNode.openBrace()); Token closeBrace = getToken(mappingConstructorExpressionNode.closeBrace()); SeparatedNodeList<MappingFieldNode> fields = this.modifySeparatedNodeList( mappingConstructorExpressionNode.fields()); return mappingConstructorExpressionNode.modify() .withOpenBrace(formatToken(openBrace, 0, 0, 0, 1)) .withCloseBrace(formatToken(closeBrace, startColumn, 0, 1, 0)) .withFields(fields) .apply(); } @Override public ListenerDeclarationNode transform(ListenerDeclarationNode listenerDeclarationNode) { if (!isInLineRange(listenerDeclarationNode)) { return listenerDeclarationNode; } Token equalsToken = getToken(listenerDeclarationNode.equalsToken()); Token variableName = getToken(listenerDeclarationNode.variableName()); Token semicolonToken = getToken(listenerDeclarationNode.semicolonToken()); Token listenerKeyword = getToken(listenerDeclarationNode.listenerKeyword()); Token visibilityQualifier = getToken(listenerDeclarationNode.visibilityQualifier().orElse(null)); Node initializer = this.modifyNode(listenerDeclarationNode.initializer()); MetadataNode metadata = this.modifyNode(listenerDeclarationNode.metadata().orElse(null)); Node typeDescriptor = this.modifyNode(listenerDeclarationNode.typeDescriptor()); if (visibilityQualifier != null) { listenerDeclarationNode = listenerDeclarationNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 0, 0, 0)).apply(); } if 
(metadata != null) { listenerDeclarationNode = listenerDeclarationNode.modify() .withMetadata(metadata).apply(); } return listenerDeclarationNode.modify() .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withInitializer(initializer) .withListenerKeyword(formatToken(listenerKeyword, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .withTypeDescriptor(typeDescriptor) .withVariableName(formatToken(variableName, 0, 0, 0, 0)) .apply(); } @Override public SpecificFieldNode transform(SpecificFieldNode specificFieldNode) { if (!isInLineRange(specificFieldNode)) { return specificFieldNode; } int startColumn = getStartColumn(specificFieldNode, specificFieldNode.kind(), true); Token fieldName = getToken((Token) specificFieldNode.fieldName()); Token readOnlyKeyword = specificFieldNode.readonlyKeyword().orElse(null); Token colon = getToken(specificFieldNode.colon().orElse(null)); ExpressionNode expressionNode = this.modifyNode(specificFieldNode.valueExpr().orElse(null)); if (readOnlyKeyword != null) { specificFieldNode = specificFieldNode.modify() .withReadonlyKeyword(formatToken(readOnlyKeyword, 0, 0, 0, 0)).apply(); } return specificFieldNode.modify() .withFieldName(formatToken(fieldName, startColumn, 0, 0, 0)) .withColon(formatToken(colon, 0, 1, 0, 0)) .withValueExpr(expressionNode) .apply(); } @Override public BinaryExpressionNode transform(BinaryExpressionNode binaryExpressionNode) { if (!isInLineRange(binaryExpressionNode)) { return binaryExpressionNode; } Node lhsExpr = this.modifyNode(binaryExpressionNode.lhsExpr()); Node rhsExpr = this.modifyNode(binaryExpressionNode.rhsExpr()); Token operator = getToken(binaryExpressionNode.operator()); return binaryExpressionNode.modify() .withLhsExpr(lhsExpr) .withRhsExpr(rhsExpr) .withOperator(formatToken(operator, 1, 1, 0, 0)) .apply(); } @Override public ArrayTypeDescriptorNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) { if (!isInLineRange(arrayTypeDescriptorNode)) { return 
arrayTypeDescriptorNode; } Node arrayLength = arrayTypeDescriptorNode.arrayLength().orElse(null); Token openBracket = getToken(arrayTypeDescriptorNode.openBracket()); Token closeBracket = getToken(arrayTypeDescriptorNode.closeBracket()); TypeDescriptorNode memberTypeDesc = this.modifyNode(arrayTypeDescriptorNode.memberTypeDesc()); if (arrayLength != null) { arrayTypeDescriptorNode = arrayTypeDescriptorNode.modify() .withArrayLength(this.modifyNode(arrayLength)).apply(); } return arrayTypeDescriptorNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .withMemberTypeDesc(memberTypeDesc) .apply(); } @Override public AssignmentStatementNode transform(AssignmentStatementNode assignmentStatementNode) { if (!isInLineRange(assignmentStatementNode)) { return assignmentStatementNode; } Node varRef = this.modifyNode(assignmentStatementNode.varRef()); ExpressionNode expression = this.modifyNode(assignmentStatementNode.expression()); Token equalsToken = getToken(assignmentStatementNode.equalsToken()); Token semicolonToken = getToken(assignmentStatementNode.semicolonToken()); return assignmentStatementNode.modify() .withVarRef(varRef) .withExpression(expression) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .apply(); } @Override public IndexedExpressionNode transform(IndexedExpressionNode indexedExpressionNode) { if (!isInLineRange(indexedExpressionNode)) { return indexedExpressionNode; } SeparatedNodeList<ExpressionNode> keyExpression = this.modifySeparatedNodeList( indexedExpressionNode.keyExpression()); ExpressionNode containerExpression = this.modifyNode(indexedExpressionNode.containerExpression()); Token openBracket = getToken(indexedExpressionNode.openBracket()); Token closeBracket = getToken(indexedExpressionNode.closeBracket()); return indexedExpressionNode.modify() .withKeyExpression(keyExpression) 
.withContainerExpression(containerExpression) .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public CheckExpressionNode transform(CheckExpressionNode checkExpressionNode) { if (!isInLineRange(checkExpressionNode)) { return checkExpressionNode; } int startColumn = getStartColumn(checkExpressionNode, checkExpressionNode.kind(), false); Token checkKeyword = getToken(checkExpressionNode.checkKeyword()); ExpressionNode expressionNode = this.modifyNode(checkExpressionNode.expression()); return checkExpressionNode.modify() .withCheckKeyword(formatToken(checkKeyword, startColumn, 1, 0, 0)) .withExpression(expressionNode) .apply(); } @Override public WhileStatementNode transform(WhileStatementNode whileStatementNode) { if (!isInLineRange(whileStatementNode)) { return whileStatementNode; } int startColumn = getStartColumn(whileStatementNode, whileStatementNode.kind(), true); Token whileKeyword = getToken(whileStatementNode.whileKeyword()); ExpressionNode condition = this.modifyNode(whileStatementNode.condition()); BlockStatementNode whileBody = this.modifyNode(whileStatementNode.whileBody()); return whileStatementNode.modify() .withWhileKeyword(formatToken(whileKeyword, startColumn, 0, 0, 0)) .withCondition(condition) .withWhileBody(whileBody) .apply(); } @Override public ReturnStatementNode transform(ReturnStatementNode returnStatementNode) { if (!isInLineRange(returnStatementNode)) { return returnStatementNode; } int startColumn = getStartColumn(returnStatementNode, returnStatementNode.kind(), true); Token returnKeyword = getToken(returnStatementNode.returnKeyword()); ExpressionNode expressionNode = returnStatementNode.expression().orElse(null); Token semicolonToken = getToken(returnStatementNode.semicolonToken()); if (expressionNode != null) { returnStatementNode = returnStatementNode.modify() .withExpression(this.modifyNode(expressionNode)).apply(); } return 
returnStatementNode.modify() .withReturnKeyword(formatToken(returnKeyword, startColumn, 1, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 1)) .apply(); } @Override public MethodCallExpressionNode transform(MethodCallExpressionNode methodCallExpressionNode) { if (!isInLineRange(methodCallExpressionNode)) { return methodCallExpressionNode; } SeparatedNodeList<FunctionArgumentNode> arguments = this.modifySeparatedNodeList(methodCallExpressionNode .arguments()); Token openParenToken = getToken(methodCallExpressionNode.openParenToken()); Token closeParenToken = getToken(methodCallExpressionNode.closeParenToken()); Token dotToken = getToken(methodCallExpressionNode.dotToken()); ExpressionNode expression = this.modifyNode(methodCallExpressionNode.expression()); NameReferenceNode methodName = this.modifyNode(methodCallExpressionNode.methodName()); return methodCallExpressionNode.modify() .withArguments(arguments) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .withDotToken(formatToken(dotToken, 0, 0, 0, 0)) .withExpression(expression) .withMethodName(methodName) .apply(); } @Override public NilLiteralNode transform(NilLiteralNode nilLiteralNode) { Token openParenToken = getToken(nilLiteralNode.openParenToken()); Token closeParenToken = getToken(nilLiteralNode.closeParenToken()); return nilLiteralNode.modify() .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public NilTypeDescriptorNode transform(NilTypeDescriptorNode nilTypeDescriptorNode) { Token openParenToken = getToken(nilTypeDescriptorNode.openParenToken()); Token closeParenToken = getToken(nilTypeDescriptorNode.closeParenToken()); return nilTypeDescriptorNode.modify() .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public 
UnionTypeDescriptorNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
        // Format a union type descriptor (e.g. `A|B`): recursively format the
        // member type descriptors on either side of the pipe token.
        TypeDescriptorNode leftTypeDesc = this.modifyNode(unionTypeDescriptorNode.leftTypeDesc());
        Token pipeToken = getToken(unionTypeDescriptorNode.pipeToken());
        TypeDescriptorNode rightTypeDesc = this.modifyNode(unionTypeDescriptorNode.rightTypeDesc());
        // NOTE(review): unlike sibling transforms, the pipe token is not passed
        // through formatToken(...) here — confirm whether that is intentional.
        return unionTypeDescriptorNode.modify()
                .withLeftTypeDesc(leftTypeDesc)
                .withPipeToken(pipeToken)
                .withRightTypeDesc(rightTypeDesc)
                .apply();
    }

    /**
     * Formats a module-level XML namespace declaration
     * ({@code xmlns "uri" as prefix;}).
     */
    @Override
    public ModuleXMLNamespaceDeclarationNode transform(
            ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) {
        Token xmlnsKeyword = getToken(moduleXMLNamespaceDeclarationNode.xmlnsKeyword());
        ExpressionNode namespaceuri = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespaceuri());
        Token asKeyword = getToken(moduleXMLNamespaceDeclarationNode.asKeyword());
        IdentifierToken namespacePrefix = this.modifyNode(moduleXMLNamespaceDeclarationNode.namespacePrefix());
        Token semicolonToken = getToken(moduleXMLNamespaceDeclarationNode.semicolonToken());
        return moduleXMLNamespaceDeclarationNode.modify()
                .withNamespacePrefix(namespacePrefix)
                .withNamespaceuri(namespaceuri)
                .withXmlnsKeyword(formatToken(xmlnsKeyword, 0, 0, 0, 0))
                .withAsKeyword(formatToken(asKeyword, 0, 0, 0, 0))
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    /**
     * Formats an {@code xml} type descriptor, including its optional
     * type-parameter node ({@code xml<...>}) when present.
     */
    @Override
    public XmlTypeDescriptorNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {
        int startColumn = getStartColumn(xmlTypeDescriptorNode, xmlTypeDescriptorNode.kind(), true);
        Token xmlKeywordToken = getToken(xmlTypeDescriptorNode.xmlKeywordToken());
        TypeParameterNode xmlTypeParamsNode = this.modifyNode(xmlTypeDescriptorNode.xmlTypeParamsNode().orElse(null));
        // Type params are optional; only rewrite them when they exist.
        if (xmlTypeParamsNode != null) {
            xmlTypeDescriptorNode = xmlTypeDescriptorNode.modify()
                    .withXmlTypeParamsNode(xmlTypeParamsNode).apply();
        }
        return xmlTypeDescriptorNode.modify()
                .withXmlKeywordToken(formatToken(xmlKeywordToken, startColumn, 0, 0, 0))
.apply(); } @Override public XMLElementNode transform(XMLElementNode xMLElementNode) { XMLStartTagNode startTag = this.modifyNode(xMLElementNode.startTag()); NodeList<XMLItemNode> content = modifyNodeList(xMLElementNode.content()); XMLEndTagNode endTag = this.modifyNode(xMLElementNode.endTag()); return xMLElementNode.modify() .withStartTag(startTag) .withEndTag(endTag) .withContent(content) .apply(); } @Override public XMLStartTagNode transform(XMLStartTagNode xMLStartTagNode) { Token ltToken = getToken(xMLStartTagNode.ltToken()); XMLNameNode name = this.modifyNode(xMLStartTagNode.name()); NodeList<XMLAttributeNode> attributes = modifyNodeList(xMLStartTagNode.attributes()); Token getToken = getToken(xMLStartTagNode.getToken()); return xMLStartTagNode.modify() .withName(name) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withAttributes(attributes) .withGetToken(formatToken(getToken, 0, 0, 0, 0)) .apply(); } @Override public XMLEndTagNode transform(XMLEndTagNode xMLEndTagNode) { Token ltToken = getToken(xMLEndTagNode.ltToken()); Token slashToken = getToken(xMLEndTagNode.slashToken()); XMLNameNode name = this.modifyNode(xMLEndTagNode.name()); Token getToken = getToken(xMLEndTagNode.getToken()); return xMLEndTagNode.modify() .withName(name) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withSlashToken(formatToken(slashToken, 0, 0, 0, 0)) .withGetToken(formatToken(getToken, 0, 0, 0, 0)) .apply(); } @Override public XMLSimpleNameNode transform(XMLSimpleNameNode xMLSimpleNameNode) { Token name = getToken(xMLSimpleNameNode.name()); if (xMLSimpleNameNode.parent().kind() == SyntaxKind.XML_PI && ((XMLProcessingInstruction) xMLSimpleNameNode.parent()).data() != null) { return xMLSimpleNameNode.modify() .withName(formatToken(name, 0, 1, 0, 0)) .apply(); } return xMLSimpleNameNode.modify() .withName(formatToken(name, 0, 0, 0, 0)) .apply(); } @Override public XMLQualifiedNameNode transform(XMLQualifiedNameNode xMLQualifiedNameNode) { XMLSimpleNameNode prefix = 
this.modifyNode(xMLQualifiedNameNode.prefix()); Token colon = getToken(xMLQualifiedNameNode.colon()); XMLSimpleNameNode name = this.modifyNode(xMLQualifiedNameNode.name()); return xMLQualifiedNameNode.modify() .withPrefix(prefix) .withName(name) .withColon(formatToken(colon, 0, 0, 0, 0)) .apply(); } @Override public XMLEmptyElementNode transform(XMLEmptyElementNode xMLEmptyElementNode) { Token ltToken = getToken(xMLEmptyElementNode.ltToken()); XMLNameNode name = this.modifyNode(xMLEmptyElementNode.name()); NodeList<XMLAttributeNode> attributes = this.modifyNodeList(xMLEmptyElementNode.attributes()); Token slashToken = getToken(xMLEmptyElementNode.slashToken()); Token getToken = getToken(xMLEmptyElementNode.getToken()); return xMLEmptyElementNode.modify() .withName(name) .withAttributes(attributes) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withSlashToken(formatToken(slashToken, 0, 0, 0, 0)) .withGetToken(formatToken(getToken, 0, 0, 0, 0)) .apply(); } @Override public XMLTextNode transform(XMLTextNode xMLTextNode) { Token content = getToken(xMLTextNode.content()); return xMLTextNode.modify() .withContent(formatToken(content, 0, 0, 0, 0)) .apply(); } @Override public XMLAttributeNode transform(XMLAttributeNode xMLAttributeNode) { XMLNameNode attributeName = this.modifyNode(xMLAttributeNode.attributeName()); Token equalToken = getToken(xMLAttributeNode.equalToken()); XMLAttributeValue value = this.modifyNode(xMLAttributeNode.value()); return xMLAttributeNode.modify() .withValue(value) .withAttributeName(attributeName) .withEqualToken(formatToken(equalToken, 0, 0, 0, 0)) .apply(); } @Override public XMLAttributeValue transform(XMLAttributeValue xMLAttributeValue) { Token startQuote = getToken(xMLAttributeValue.startQuote()); NodeList<Node> value = this.modifyNodeList(xMLAttributeValue.value()); Token endQuote = getToken(xMLAttributeValue.endQuote()); return xMLAttributeValue.modify() .withStartQuote(formatToken(startQuote, 0, 0, 0, 0)) .withValue(value) 
.withEndQuote(formatToken(endQuote, 0, 0, 0, 0)) .apply(); } @Override public XMLComment transform(XMLComment xMLComment) { Token commentStart = getToken(xMLComment.commentStart()); NodeList<Node> content = this.modifyNodeList(xMLComment.content()); Token commentEnd = getToken(xMLComment.commentEnd()); return xMLComment.modify() .withCommentStart(formatToken(commentStart, 0, 0, 0, 0)) .withContent(content) .withCommentEnd(formatToken(commentEnd, 0, 0, 0, 0)) .apply(); } @Override public XMLProcessingInstruction transform(XMLProcessingInstruction xMLProcessingInstruction) { Token piStart = getToken(xMLProcessingInstruction.piStart()); XMLNameNode target = this.modifyNode(xMLProcessingInstruction.target()); NodeList<Node> data = this.modifyNodeList(xMLProcessingInstruction.data()); Token piEnd = getToken(xMLProcessingInstruction.piEnd()); return xMLProcessingInstruction.modify() .withTarget(target) .withPiStart(formatToken(piStart, 0, 0, 0, 0)) .withData(data) .withPiEnd(formatToken(piEnd, 0, 0, 0, 0)) .apply(); } @Override public XMLFilterExpressionNode transform(XMLFilterExpressionNode xMLFilterExpressionNode) { ExpressionNode expression = this.modifyNode(xMLFilterExpressionNode.expression()); XMLNamePatternChainingNode xmlPatternChain = this.modifyNode(xMLFilterExpressionNode.xmlPatternChain()); return xMLFilterExpressionNode.modify() .withExpression(expression) .withXmlPatternChain(xmlPatternChain) .apply(); } @Override public XMLStepExpressionNode transform(XMLStepExpressionNode xMLStepExpressionNode) { ExpressionNode expression = this.modifyNode(xMLStepExpressionNode.expression()); Node xmlStepStart = this.modifyNode(xMLStepExpressionNode.xmlStepStart()); return xMLStepExpressionNode.modify() .withExpression(expression) .withXmlStepStart(xmlStepStart) .apply(); } @Override public XMLNamePatternChainingNode transform(XMLNamePatternChainingNode xMLNamePatternChainingNode) { Token startToken = getToken(xMLNamePatternChainingNode.startToken()); 
SeparatedNodeList<Node> xmlNamePattern = modifySeparatedNodeList(xMLNamePatternChainingNode.xmlNamePattern());
        Token gtToken = getToken(xMLNamePatternChainingNode.gtToken());
        return xMLNamePatternChainingNode.modify()
                .withStartToken(formatToken(startToken, 0, 0, 0, 0))
                .withXmlNamePattern(xmlNamePattern)
                .withGtToken(formatToken(gtToken, 0, 0, 0, 0))
                .apply();
    }

    /**
     * Formats an XML atomic name pattern ({@code prefix:name}).
     */
    @Override
    public XMLAtomicNamePatternNode transform(XMLAtomicNamePatternNode xMLAtomicNamePatternNode) {
        Token prefix = getToken(xMLAtomicNamePatternNode.prefix());
        Token colon = getToken(xMLAtomicNamePatternNode.colon());
        Token name = getToken(xMLAtomicNamePatternNode.name());
        return xMLAtomicNamePatternNode.modify()
                .withPrefix(formatToken(prefix, 0, 0, 0, 0))
                .withColon(formatToken(colon, 0, 0, 0, 0))
                .withName(formatToken(name, 0, 0, 0, 0))
                .apply();
    }

    /**
     * Formats a template expression. The leading type keyword is optional
     * (raw templates have none), so it is only rewritten when present.
     */
    @Override
    public TemplateExpressionNode transform(TemplateExpressionNode templateExpressionNode) {
        Token type = getToken(templateExpressionNode.type().orElse(null));
        Token startBacktick = getToken(templateExpressionNode.startBacktick());
        NodeList<TemplateMemberNode> content = modifyNodeList(templateExpressionNode.content());
        Token endBacktick = getToken(templateExpressionNode.endBacktick());
        // Guard the optional type keyword so a null token is never handed to
        // withType(); this matches the null-guard convention used for other
        // optional tokens in this class (e.g. ByteArrayLiteralNode.content).
        if (type != null) {
            templateExpressionNode = templateExpressionNode.modify()
                    .withType(formatToken(type, 0, 0, 0, 0)).apply();
        }
        return templateExpressionNode.modify()
                .withStartBacktick(formatToken(startBacktick, 1, 0, 0, 0))
                .withContent(content)
                .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0))
                .apply();
    }

    /**
     * Formats a byte-array literal ({@code base16`...`} / {@code base64`...`}).
     * The backtick content is optional and only rewritten when present.
     */
    @Override
    public ByteArrayLiteralNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {
        Token type = getToken(byteArrayLiteralNode.type());
        Token startBacktick = getToken(byteArrayLiteralNode.startBacktick());
        Token content = getToken(byteArrayLiteralNode.content().orElse(null));
        Token endBacktick = getToken(byteArrayLiteralNode.endBacktick());
        if (content != null) {
            byteArrayLiteralNode = byteArrayLiteralNode.modify()
                    .withContent(formatToken(content, 0, 0, 0, 0)).apply();
        }
        return byteArrayLiteralNode.modify()
.withType(formatToken(type, 0, 0, 0, 0)) .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0)) .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0)) .apply(); } @Override public ListConstructorExpressionNode transform(ListConstructorExpressionNode listConstructorExpressionNode) { Token openBracket = getToken(listConstructorExpressionNode.openBracket()); SeparatedNodeList<Node> expressions = this.modifySeparatedNodeList(listConstructorExpressionNode.expressions()); Token closeBracket = getToken(listConstructorExpressionNode.closeBracket()); return listConstructorExpressionNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withExpressions(expressions) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public TypeReferenceNode transform(TypeReferenceNode typeReferenceNode) { Token asteriskToken = getToken(typeReferenceNode.asteriskToken()); Node typeName = this.modifyNode(typeReferenceNode.typeName()); Token semicolonToken = getToken(typeReferenceNode.semicolonToken()); return typeReferenceNode.modify() .withTypeName(typeName) .withAsteriskToken(formatToken(asteriskToken, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public TupleTypeDescriptorNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) { int startCol = getStartColumn(tupleTypeDescriptorNode, tupleTypeDescriptorNode.kind(), true); Token openBracketToken = getToken(tupleTypeDescriptorNode.openBracketToken()); SeparatedNodeList<Node> memberTypeDesc = this.modifySeparatedNodeList(tupleTypeDescriptorNode.memberTypeDesc()); Token closeBracketToken = getToken(tupleTypeDescriptorNode.closeBracketToken()); return tupleTypeDescriptorNode.modify() .withOpenBracketToken(formatToken(openBracketToken, startCol, 0, 0, 0)) .withMemberTypeDesc(memberTypeDesc) .withCloseBracketToken(formatToken(closeBracketToken, 0, 0, 0, 0)) .apply(); } @Override public MappingMatchPatternNode transform(MappingMatchPatternNode 
mappingMatchPatternNode) { Token openBraceToken = getToken(mappingMatchPatternNode.openBraceToken()); SeparatedNodeList<FieldMatchPatternNode> fieldMatchPatterns = this.modifySeparatedNodeList(mappingMatchPatternNode.fieldMatchPatterns()); RestMatchPatternNode restMatchPattern = this.modifyNode(mappingMatchPatternNode.restMatchPattern().orElse(null)); Token closeBraceToken = getToken(mappingMatchPatternNode.closeBraceToken()); if (restMatchPattern != null) { mappingMatchPatternNode = mappingMatchPatternNode.modify() .withRestMatchPattern(restMatchPattern).apply(); } return mappingMatchPatternNode.modify() .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0)) .withFieldMatchPatterns(fieldMatchPatterns) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0)) .apply(); } @Override public ParameterizedTypeDescriptorNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescriptorNode) { int startCol = getStartColumn(parameterizedTypeDescriptorNode, parameterizedTypeDescriptorNode.kind(), true); Token parameterizedType = getToken(parameterizedTypeDescriptorNode.parameterizedType()); TypeParameterNode typeParameter = this.modifyNode(parameterizedTypeDescriptorNode.typeParameter()); return parameterizedTypeDescriptorNode.modify() .withParameterizedType(formatToken(parameterizedType, startCol, 0, 0, 0)) .withTypeParameter(typeParameter) .apply(); } @Override public TypeParameterNode transform(TypeParameterNode typeParameterNode) { Token ltToken = getToken(typeParameterNode.ltToken()); TypeDescriptorNode typeNode = this.modifyNode(typeParameterNode.typeNode()); Token gtToken = getToken(typeParameterNode.gtToken()); return typeParameterNode.modify() .withTypeNode(typeNode) .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withGtToken(formatToken(gtToken, 0, 0, 0, 0)) .apply(); } @Override public StartActionNode transform(StartActionNode startActionNode) { if (!isInLineRange(startActionNode)) { return startActionNode; } NodeList<AnnotationNode> 
annotations = this.modifyNodeList(startActionNode.annotations()); Token startKeyword = getToken(startActionNode.startKeyword()); ExpressionNode expression = this.modifyNode(startActionNode.expression()); return startActionNode.modify() .withAnnotations(annotations) .withStartKeyword(formatToken(startKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public FlushActionNode transform(FlushActionNode flushActionNode) { if (!isInLineRange(flushActionNode)) { return flushActionNode; } Token flushKeyword = getToken(flushActionNode.flushKeyword()); NameReferenceNode peerWorker = this.modifyNode(flushActionNode.peerWorker()); return flushActionNode.modify() .withFlushKeyword(formatToken(flushKeyword, 0, 1, 0, 0)) .withPeerWorker(peerWorker) .apply(); } @Override public NamedWorkerDeclarationNode transform(NamedWorkerDeclarationNode namedWorkerDeclarationNode) { if (!isInLineRange(namedWorkerDeclarationNode)) { return namedWorkerDeclarationNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(namedWorkerDeclarationNode.annotations()); Token workerKeyword = getToken(namedWorkerDeclarationNode.workerKeyword()); IdentifierToken workerName = this.modifyNode(namedWorkerDeclarationNode.workerName()); Node returnTypeDesc = this.modifyNode(namedWorkerDeclarationNode.returnTypeDesc().orElse(null)); BlockStatementNode workerBody = this.modifyNode(namedWorkerDeclarationNode.workerBody()); if (returnTypeDesc != null) { namedWorkerDeclarationNode = namedWorkerDeclarationNode.modify() .withReturnTypeDesc(returnTypeDesc).apply(); } return namedWorkerDeclarationNode.modify() .withAnnotations(annotations) .withWorkerKeyword(formatToken(workerKeyword, 0, 0, 0, 0)) .withWorkerName(workerName) .withWorkerBody(workerBody) .apply(); } @Override public TypeDefinitionNode transform(TypeDefinitionNode typeDefinitionNode) { if (!isInLineRange(typeDefinitionNode)) { return typeDefinitionNode; } MetadataNode metadata = 
this.modifyNode(typeDefinitionNode.metadata().orElse(null)); Token visibilityQualifier = getToken(typeDefinitionNode.visibilityQualifier().orElse(null)); Token typeKeyword = getToken(typeDefinitionNode.typeKeyword()); Token typeName = getToken(typeDefinitionNode.typeName()); Node typeDescriptor = this.modifyNode(typeDefinitionNode.typeDescriptor()); Token semicolonToken = this.modifyToken(typeDefinitionNode.semicolonToken()); if (metadata != null) { typeDefinitionNode = typeDefinitionNode.modify() .withMetadata(metadata).apply(); } if (visibilityQualifier != null) { typeDefinitionNode = typeDefinitionNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 1, 1, 0, 0)).apply(); } return typeDefinitionNode.modify() .withTypeKeyword(formatToken(typeKeyword, 1, 1, 0, 0)) .withTypeName(formatToken(typeName, 1, 1, 0, 0)) .withTypeDescriptor(typeDescriptor) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public CompoundAssignmentStatementNode transform(CompoundAssignmentStatementNode compoundAssignmentStatementNode) { if (!isInLineRange(compoundAssignmentStatementNode)) { return compoundAssignmentStatementNode; } ExpressionNode lhsExpression = this.modifyNode(compoundAssignmentStatementNode.lhsExpression()); Token binaryOperator = getToken(compoundAssignmentStatementNode.binaryOperator()); Token equalsToken = getToken(compoundAssignmentStatementNode.equalsToken()); ExpressionNode rhsExpression = this.modifyNode(compoundAssignmentStatementNode.rhsExpression()); Token semicolonToken = getToken(compoundAssignmentStatementNode.semicolonToken()); return compoundAssignmentStatementNode.modify() .withLhsExpression(lhsExpression) .withBinaryOperator(formatToken(binaryOperator, 1, 1, 0, 0)) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withRhsExpression(rhsExpression) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public BreakStatementNode transform(BreakStatementNode 
breakStatementNode) { if (!isInLineRange(breakStatementNode)) { return breakStatementNode; } Token breakToken = getToken(breakStatementNode.breakToken()); Token semicolonToken = getToken(breakStatementNode.semicolonToken()); return breakStatementNode.modify() .withBreakToken(formatToken(breakToken, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public ContinueStatementNode transform(ContinueStatementNode continueStatementNode) { if (!isInLineRange(continueStatementNode)) { return continueStatementNode; } Token continueToken = getToken(continueStatementNode.continueToken()); Token semicolonToken = getToken(continueStatementNode.semicolonToken()); return continueStatementNode.modify() .withContinueToken(formatToken(continueToken, 0, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public ExternalFunctionBodyNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) { if (!isInLineRange(externalFunctionBodyNode)) { return externalFunctionBodyNode; } Token equalsToken = getToken(externalFunctionBodyNode.equalsToken()); NodeList<AnnotationNode> annotations = this.modifyNodeList(externalFunctionBodyNode.annotations()); Token externalKeyword = getToken(externalFunctionBodyNode.externalKeyword()); Token semicolonToken = getToken(externalFunctionBodyNode.semicolonToken()); return externalFunctionBodyNode.modify() .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withAnnotations(annotations) .withExternalKeyword(formatToken(externalKeyword, 1, 0, 0, 0)) .withSemicolonToken(formatToken(semicolonToken, 1, 0, 0, 0)) .apply(); } @Override public PanicStatementNode transform(PanicStatementNode panicStatementNode) { if (!isInLineRange(panicStatementNode)) { return panicStatementNode; } Token panicKeyword = getToken(panicStatementNode.panicKeyword()); ExpressionNode expression = this.modifyNode(panicStatementNode.expression()); Token semicolonToken = 
getToken(panicStatementNode.semicolonToken()); return panicStatementNode.modify() .withPanicKeyword(formatToken(panicKeyword, 1, 1, 0, 0)) .withExpression(expression) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public LocalTypeDefinitionStatementNode transform( LocalTypeDefinitionStatementNode localTypeDefinitionStatementNode) { if (!isInLineRange(localTypeDefinitionStatementNode)) { return localTypeDefinitionStatementNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(localTypeDefinitionStatementNode.annotations()); Token typeKeyword = getToken(localTypeDefinitionStatementNode.typeKeyword()); Node typeName = this.modifyNode(localTypeDefinitionStatementNode.typeName()); Node typeDescriptor = this.modifyNode(localTypeDefinitionStatementNode.typeDescriptor()); Token semicolonToken = getToken(localTypeDefinitionStatementNode.semicolonToken()); return localTypeDefinitionStatementNode.modify() .withAnnotations(annotations) .withTypeKeyword(formatToken(typeKeyword, 0, 1, 0, 0)) .withTypeName(typeName) .withTypeDescriptor(typeDescriptor) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public LockStatementNode transform(LockStatementNode lockStatementNode) { if (!isInLineRange(lockStatementNode)) { return lockStatementNode; } Token lockKeyword = getToken(lockStatementNode.lockKeyword()); StatementNode blockStatement = this.modifyNode(lockStatementNode.blockStatement()); return lockStatementNode.modify() .withLockKeyword(formatToken(lockKeyword, 0, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public ForkStatementNode transform(ForkStatementNode forkStatementNode) { if (!isInLineRange(forkStatementNode)) { return forkStatementNode; } Token forkKeyword = getToken(forkStatementNode.forkKeyword()); Token openBraceToken = getToken(forkStatementNode.openBraceToken()); NodeList<NamedWorkerDeclarationNode> namedWorkerDeclarations = 
this.modifyNodeList(forkStatementNode.namedWorkerDeclarations()); Token closeBraceToken = getToken(forkStatementNode.closeBraceToken()); return forkStatementNode.modify() .withForkKeyword(formatToken(forkKeyword, 1, 1, 0, 0)) .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0)) .withNamedWorkerDeclarations(namedWorkerDeclarations) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0)) .apply(); } @Override public ForEachStatementNode transform(ForEachStatementNode forEachStatementNode) { if (!isInLineRange(forEachStatementNode)) { return forEachStatementNode; } Token forEachKeyword = getToken(forEachStatementNode.forEachKeyword()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(forEachStatementNode.typedBindingPattern()); Token inKeyword = getToken(forEachStatementNode.inKeyword()); Node actionOrExpressionNode = this.modifyNode(forEachStatementNode.actionOrExpressionNode()); StatementNode blockStatement = this.modifyNode(forEachStatementNode.blockStatement()); return forEachStatementNode.modify() .withForEachKeyword(formatToken(forEachKeyword, 0, 1, 0, 0)) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0)) .withActionOrExpressionNode(actionOrExpressionNode) .withBlockStatement(blockStatement) .apply(); } @Override public FailExpressionNode transform(FailExpressionNode failExpressionNode) { if (!isInLineRange(failExpressionNode)) { return failExpressionNode; } Token failKeyword = getToken(failExpressionNode.failKeyword()); ExpressionNode expression = this.modifyNode(failExpressionNode.expression()); return failExpressionNode.modify() .withFailKeyword(formatToken(failKeyword, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public FieldAccessExpressionNode transform(FieldAccessExpressionNode fieldAccessExpressionNode) { if (!isInLineRange(fieldAccessExpressionNode)) { return fieldAccessExpressionNode; } ExpressionNode expression = 
this.modifyNode(fieldAccessExpressionNode.expression()); Token dotToken = getToken(fieldAccessExpressionNode.dotToken()); NameReferenceNode fieldName = this.modifyNode(fieldAccessExpressionNode.fieldName()); return fieldAccessExpressionNode.modify() .withExpression(expression) .withDotToken(formatToken(dotToken, 0, 0, 0, 0)) .withFieldName(fieldName) .apply(); } @Override public TypeofExpressionNode transform(TypeofExpressionNode typeofExpressionNode) { if (!isInLineRange(typeofExpressionNode)) { return typeofExpressionNode; } Token typeofKeyword = getToken(typeofExpressionNode.typeofKeyword()); ExpressionNode expression = this.modifyNode(typeofExpressionNode.expression()); return typeofExpressionNode.modify() .withTypeofKeyword(formatToken(typeofKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public UnaryExpressionNode transform(UnaryExpressionNode unaryExpressionNode) { if (!isInLineRange(unaryExpressionNode)) { return unaryExpressionNode; } Token unaryOperator = getToken(unaryExpressionNode.unaryOperator()); ExpressionNode expression = this.modifyNode(unaryExpressionNode.expression()); return unaryExpressionNode.modify() .withUnaryOperator(formatToken(unaryOperator, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public ComputedNameFieldNode transform(ComputedNameFieldNode computedNameFieldNode) { if (!isInLineRange(computedNameFieldNode)) { return computedNameFieldNode; } Token openBracket = getToken(computedNameFieldNode.openBracket()); ExpressionNode fieldNameExpr = this.modifyNode(computedNameFieldNode.fieldNameExpr()); Token closeBracket = getToken(computedNameFieldNode.closeBracket()); Token colonToken = getToken(computedNameFieldNode.colonToken()); ExpressionNode valueExpr = this.modifyNode(computedNameFieldNode.valueExpr()); return computedNameFieldNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withFieldNameExpr(fieldNameExpr) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) 
.withColonToken(formatToken(colonToken, 1, 1, 0, 0))
                .withValueExpr(valueExpr)
                .apply();
    }

    // Formats a defaultable parameter: children are run through the modifier and the
    // `=` token / parameter name are re-spaced via formatToken.
    @Override
    public DefaultableParameterNode transform(DefaultableParameterNode defaultableParameterNode) {
        if (!isInLineRange(defaultableParameterNode)) {
            return defaultableParameterNode;
        }
        NodeList<AnnotationNode> annotations = this.modifyNodeList(defaultableParameterNode.annotations());
        Node typeName = this.modifyNode(defaultableParameterNode.typeName());
        Token paramName = getToken(defaultableParameterNode.paramName().orElse(null));
        Token equalsToken = getToken(defaultableParameterNode.equalsToken());
        Node expression = this.modifyNode(defaultableParameterNode.expression());
        // The parameter name is optional; only format it when present.
        if (paramName != null) {
            defaultableParameterNode = defaultableParameterNode.modify()
                    .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();
        }
        return defaultableParameterNode.modify()
                .withAnnotations(annotations)
                .withTypeName(typeName)
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a rest parameter; the optional parameter name is formatted only when present.
    @Override
    public RestParameterNode transform(RestParameterNode restParameterNode) {
        if (!isInLineRange(restParameterNode)) {
            return restParameterNode;
        }
        NodeList<AnnotationNode> annotations = this.modifyNodeList(restParameterNode.annotations());
        Node typeName = this.modifyNode(restParameterNode.typeName());
        Token ellipsisToken = getToken(restParameterNode.ellipsisToken());
        Token paramName = getToken(restParameterNode.paramName().orElse(null));
        if (paramName != null) {
            restParameterNode = restParameterNode.modify()
                    .withParamName(formatToken(paramName, 1, 1, 0, 0)).apply();
        }
        return restParameterNode.modify()
                .withAnnotations(annotations)
                .withTypeName(typeName)
                .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a spread field inside a mapping constructor.
    @Override
    public SpreadFieldNode transform(SpreadFieldNode spreadFieldNode) {
        if (!isInLineRange(spreadFieldNode)) {
            return spreadFieldNode;
        }
        Token ellipsis = getToken(spreadFieldNode.ellipsis());
        ExpressionNode valueExpr = this.modifyNode(spreadFieldNode.valueExpr());
        return spreadFieldNode.modify()
                .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))
                .withValueExpr(valueExpr)
                .apply();
    }

    // Formats a named argument (`name = expr`).
    @Override
    public NamedArgumentNode transform(NamedArgumentNode namedArgumentNode) {
        if (!isInLineRange(namedArgumentNode)) {
            return namedArgumentNode;
        }
        SimpleNameReferenceNode argumentName = this.modifyNode(namedArgumentNode.argumentName());
        Token equalsToken = getToken(namedArgumentNode.equalsToken());
        ExpressionNode expression = this.modifyNode(namedArgumentNode.expression());
        return namedArgumentNode.modify()
                .withArgumentName(argumentName)
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats a rest argument (`...expr`).
    @Override
    public RestArgumentNode transform(RestArgumentNode restArgumentNode) {
        if (!isInLineRange(restArgumentNode)) {
            return restArgumentNode;
        }
        Token ellipsis = getToken(restArgumentNode.ellipsis());
        ExpressionNode expression = this.modifyNode(restArgumentNode.expression());
        return restArgumentNode.modify()
                .withEllipsis(formatToken(ellipsis, 0, 0, 0, 0))
                .withExpression(expression)
                .apply();
    }

    // Formats an object type descriptor and its members.
    @Override
    public ObjectTypeDescriptorNode transform(ObjectTypeDescriptorNode objectTypeDescriptorNode) {
        if (!isInLineRange(objectTypeDescriptorNode)) {
            return objectTypeDescriptorNode;
        }
        NodeList<Token> objectTypeQualifiers = this.modifyNodeList(objectTypeDescriptorNode.objectTypeQualifiers());
        Token objectKeyword = getToken(objectTypeDescriptorNode.objectKeyword());
        Token openBrace = getToken(objectTypeDescriptorNode.openBrace());
        NodeList<Node> members = this.modifyNodeList(objectTypeDescriptorNode.members());
        Token closeBrace = getToken(objectTypeDescriptorNode.closeBrace());
        return objectTypeDescriptorNode.modify()
                .withObjectTypeQualifiers(objectTypeQualifiers)
                .withObjectKeyword(formatToken(objectKeyword, 0, 1, 1, 0))
                .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0))
                .withMembers(members)
                .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record type descriptor; the rest descriptor is optional.
    @Override
    public RecordTypeDescriptorNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
        if (!isInLineRange(recordTypeDescriptorNode)) {
            return recordTypeDescriptorNode;
        }
        Token recordKeyword = getToken(recordTypeDescriptorNode.recordKeyword());
        Token bodyStartDelimiter = getToken(recordTypeDescriptorNode.bodyStartDelimiter());
        NodeList<Node> fields = this.modifyNodeList(recordTypeDescriptorNode.fields());
        RecordRestDescriptorNode recordRestDescriptor =
                modifyNode(recordTypeDescriptorNode.recordRestDescriptor().orElse(null));
        Token bodyEndDelimiter = getToken(recordTypeDescriptorNode.bodyEndDelimiter());
        if (recordRestDescriptor != null) {
            recordTypeDescriptorNode = recordTypeDescriptorNode.modify()
                    .withRecordRestDescriptor(recordRestDescriptor).apply();
        }
        return recordTypeDescriptorNode.modify()
                .withRecordKeyword(formatToken(recordKeyword, 0, 1, 0, 0))
                .withBodyStartDelimiter(formatToken(bodyStartDelimiter, 0, 0, 0, 0))
                .withFields(fields)
                .withBodyEndDelimiter(formatToken(bodyEndDelimiter, 0, 0, 0, 0))
                .apply();
    }

    // Formats an object field.
    // FIX: equalsToken and expression are optional (`.orElse(null)`) but were previously
    // pushed through formatToken()/withExpression() unconditionally. They are now applied
    // only when present, matching how every other optional child in this class is handled
    // (metadata, visibilityQualifier, readonlyKeyword, etc.).
    @Override
    public ObjectFieldNode transform(ObjectFieldNode objectFieldNode) {
        if (!isInLineRange(objectFieldNode)) {
            return objectFieldNode;
        }
        MetadataNode metadata = this.modifyNode(objectFieldNode.metadata().orElse(null));
        Token visibilityQualifier = getToken(objectFieldNode.visibilityQualifier().orElse(null));
        Token readonlyKeyword = getToken(objectFieldNode.readonlyKeyword().orElse(null));
        Node typeName = this.modifyNode(objectFieldNode.typeName());
        Token fieldName = getToken(objectFieldNode.fieldName());
        Token equalsToken = getToken(objectFieldNode.equalsToken().orElse(null));
        ExpressionNode expression = this.modifyNode(objectFieldNode.expression().orElse(null));
        Token semicolonToken = getToken(objectFieldNode.semicolonToken());
        if (metadata != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withMetadata(metadata).apply();
        }
        if (visibilityQualifier != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0)).apply();
        }
        if (readonlyKeyword != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();
        }
        if (equalsToken != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)).apply();
        }
        if (expression != null) {
            objectFieldNode = objectFieldNode.modify()
                    .withExpression(expression).apply();
        }
        return objectFieldNode.modify()
                .withTypeName(typeName)
                .withFieldName(formatToken(fieldName, 1, 1, 0, 0))
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record field; metadata, readonly keyword and `?` are optional.
    @Override
    public RecordFieldNode transform(RecordFieldNode recordFieldNode) {
        if (!isInLineRange(recordFieldNode)) {
            return recordFieldNode;
        }
        MetadataNode metadata = this.modifyNode(recordFieldNode.metadata().orElse(null));
        Token readonlyKeyword = getToken(recordFieldNode.readonlyKeyword().orElse(null));
        Node typeName = this.modifyNode(recordFieldNode.typeName());
        Token fieldName = getToken(recordFieldNode.fieldName());
        Token questionMarkToken = getToken(recordFieldNode.questionMarkToken().orElse(null));
        Token semicolonToken = getToken(recordFieldNode.semicolonToken());
        if (metadata != null) {
            recordFieldNode = recordFieldNode.modify()
                    .withMetadata(metadata).apply();
        }
        if (readonlyKeyword != null) {
            recordFieldNode = recordFieldNode.modify()
                    .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();
        }
        if (questionMarkToken != null) {
            recordFieldNode = recordFieldNode.modify()
                    .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0)).apply();
        }
        return recordFieldNode.modify()
                .withTypeName(typeName)
                .withFieldName(formatToken(fieldName, 0, 1, 0, 0))
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record field that carries a default value (`type name = expr;`).
    @Override
    public RecordFieldWithDefaultValueNode transform(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {
        if (!isInLineRange(recordFieldWithDefaultValueNode)) {
            return recordFieldWithDefaultValueNode;
        }
        MetadataNode metadata = this.modifyNode(recordFieldWithDefaultValueNode.metadata().orElse(null));
        Token readonlyKeyword = getToken(recordFieldWithDefaultValueNode.readonlyKeyword().orElse(null));
        Node typeName = this.modifyNode(recordFieldWithDefaultValueNode.typeName());
        Token fieldName = getToken(recordFieldWithDefaultValueNode.fieldName());
        Token equalsToken = getToken(recordFieldWithDefaultValueNode.equalsToken());
        ExpressionNode expression = this.modifyNode(recordFieldWithDefaultValueNode.expression());
        Token semicolonToken = getToken(recordFieldWithDefaultValueNode.semicolonToken());
        if (metadata != null) {
            recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()
                    .withMetadata(metadata).apply();
        }
        if (readonlyKeyword != null) {
            recordFieldWithDefaultValueNode = recordFieldWithDefaultValueNode.modify()
                    .withReadonlyKeyword(formatToken(readonlyKeyword, 0, 1, 0, 0)).apply();
        }
        return recordFieldWithDefaultValueNode.modify()
                .withTypeName(typeName)
                .withFieldName(formatToken(fieldName, 1, 1, 0, 0))
                .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0))
                .withExpression(expression)
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats a record rest descriptor (`type...;`).
    @Override
    public RecordRestDescriptorNode transform(RecordRestDescriptorNode recordRestDescriptorNode) {
        if (!isInLineRange(recordRestDescriptorNode)) {
            return recordRestDescriptorNode;
        }
        Node typeName = this.modifyNode(recordRestDescriptorNode.typeName());
        Token ellipsisToken = getToken(recordRestDescriptorNode.ellipsisToken());
        Token semicolonToken = getToken(recordRestDescriptorNode.semicolonToken());
        return recordRestDescriptorNode.modify()
                .withTypeName(typeName)
                .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0))
                .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0))
                .apply();
    }

    // Formats an annotation attachment (`@ref {...}`); the value is optional.
    @Override
    public AnnotationNode transform(AnnotationNode annotationNode) {
        if (!isInLineRange(annotationNode)) {
            return annotationNode;
        }
        Token atToken = getToken(annotationNode.atToken());
        Node annotReference = this.modifyNode(annotationNode.annotReference());
        MappingConstructorExpressionNode annotValue =
this.modifyNode(annotationNode.annotValue().orElse(null)); if (annotValue != null) { annotationNode = annotationNode.modify() .withAnnotValue(annotValue).apply(); } return annotationNode.modify() .withAtToken(formatToken(atToken, 1, 1, 0, 0)) .withAnnotReference(annotReference) .apply(); } @Override public AnnotationDeclarationNode transform(AnnotationDeclarationNode annotationDeclarationNode) { if (!isInLineRange(annotationDeclarationNode)) { return annotationDeclarationNode; } MetadataNode metadata = this.modifyNode(annotationDeclarationNode.metadata().orElse(null)); Token visibilityQualifier = getToken(annotationDeclarationNode.visibilityQualifier()); Token constKeyword = getToken(annotationDeclarationNode.constKeyword()); Token annotationKeyword = getToken(annotationDeclarationNode.annotationKeyword()); Node typeDescriptor = this.modifyNode(annotationDeclarationNode.typeDescriptor()); Token annotationTag = getToken(annotationDeclarationNode.annotationTag()); Token onKeyword = getToken(annotationDeclarationNode.onKeyword()); SeparatedNodeList<Node> attachPoints = this.modifySeparatedNodeList(annotationDeclarationNode.attachPoints()); Token semicolonToken = getToken(annotationDeclarationNode.semicolonToken()); if (metadata != null) { annotationDeclarationNode = annotationDeclarationNode.modify() .withMetadata(metadata).apply(); } return annotationDeclarationNode.modify() .withVisibilityQualifier(formatToken(visibilityQualifier, 0, 1, 0, 0)) .withConstKeyword(formatToken(constKeyword, 1, 1, 0, 0)) .withAnnotationKeyword(formatToken(annotationKeyword, 0, 0, 0, 0)) .withTypeDescriptor(typeDescriptor) .withAnnotationTag(formatToken(annotationTag, 0, 0, 0, 0)) .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0)) .withAttachPoints(attachPoints) .withSemicolonToken(formatToken(semicolonToken, 0, 0, 0, 0)) .apply(); } @Override public AnnotationAttachPointNode transform(AnnotationAttachPointNode annotationAttachPointNode) { if (!isInLineRange(annotationAttachPointNode)) { 
return annotationAttachPointNode; } Token sourceKeyword = getToken(annotationAttachPointNode.sourceKeyword()); Token firstIdent = getToken(annotationAttachPointNode.firstIdent()); Token secondIdent = getToken(annotationAttachPointNode.secondIdent()); return annotationAttachPointNode.modify() .withSourceKeyword(formatToken(sourceKeyword, 0, 1, 0, 0)) .withFirstIdent(formatToken(firstIdent, 0, 0, 0, 0)) .withSecondIdent(formatToken(secondIdent, 0, 0, 0, 0)) .apply(); } @Override public NamedWorkerDeclarator transform(NamedWorkerDeclarator namedWorkerDeclarator) { if (!isInLineRange(namedWorkerDeclarator)) { return namedWorkerDeclarator; } NodeList<StatementNode> workerInitStatements = this.modifyNodeList(namedWorkerDeclarator.workerInitStatements()); NodeList<NamedWorkerDeclarationNode> namedWorkerDeclarations = this.modifyNodeList(namedWorkerDeclarator.namedWorkerDeclarations()); return namedWorkerDeclarator.modify() .withNamedWorkerDeclarations(namedWorkerDeclarations) .withWorkerInitStatements(workerInitStatements) .apply(); } @Override public TrapExpressionNode transform(TrapExpressionNode trapExpressionNode) { if (!isInLineRange(trapExpressionNode)) { return trapExpressionNode; } Token trapKeyword = getToken(trapExpressionNode.trapKeyword()); ExpressionNode expression = this.modifyNode(trapExpressionNode.expression()); return trapExpressionNode.modify() .withTrapKeyword(formatToken(trapKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public TypeCastExpressionNode transform(TypeCastExpressionNode typeCastExpressionNode) { if (!isInLineRange(typeCastExpressionNode)) { return typeCastExpressionNode; } Token ltToken = getToken(typeCastExpressionNode.ltToken()); TypeCastParamNode typeCastParam = this.modifyNode(typeCastExpressionNode.typeCastParam()); Token gtToken = getToken(typeCastExpressionNode.gtToken()); ExpressionNode expression = this.modifyNode(typeCastExpressionNode.expression()); return typeCastExpressionNode.modify() 
.withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withTypeCastParam(typeCastParam) .withGtToken(formatToken(gtToken, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public TypeCastParamNode transform(TypeCastParamNode typeCastParamNode) { if (!isInLineRange(typeCastParamNode)) { return typeCastParamNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(typeCastParamNode.annotations()); Node type = this.modifyNode(typeCastParamNode.type()); return typeCastParamNode.modify() .withAnnotations(annotations) .withType(type) .apply(); } @Override public TableConstructorExpressionNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) { if (!isInLineRange(tableConstructorExpressionNode)) { return tableConstructorExpressionNode; } Token tableKeyword = getToken(tableConstructorExpressionNode.tableKeyword()); KeySpecifierNode keySpecifier = this.modifyNode(tableConstructorExpressionNode.keySpecifier().orElse(null)); Token openBracket = getToken(tableConstructorExpressionNode.openBracket()); SeparatedNodeList<Node> mappingConstructors = this.modifySeparatedNodeList(tableConstructorExpressionNode.mappingConstructors()); Token closeBracket = this.modifyToken(tableConstructorExpressionNode.closeBracket()); return tableConstructorExpressionNode.modify() .withTableKeyword(formatToken(tableKeyword, 0, 1, 0, 0)) .withKeySpecifier(keySpecifier) .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withMappingConstructors(mappingConstructors) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public KeySpecifierNode transform(KeySpecifierNode keySpecifierNode) { if (!isInLineRange(keySpecifierNode)) { return keySpecifierNode; } Token keyKeyword = getToken(keySpecifierNode.keyKeyword()); Token openParenToken = getToken(keySpecifierNode.openParenToken()); SeparatedNodeList<IdentifierToken> fieldNames = this.modifySeparatedNodeList(keySpecifierNode.fieldNames()); Token closeParenToken = 
getToken(keySpecifierNode.closeParenToken()); return keySpecifierNode.modify() .withKeyKeyword(formatToken(keyKeyword, 0, 1, 0, 0)) .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withFieldNames(fieldNames) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public ErrorTypeParamsNode transform(ErrorTypeParamsNode errorTypeParamsNode) { if (!isInLineRange(errorTypeParamsNode)) { return errorTypeParamsNode; } Token ltToken = getToken(errorTypeParamsNode.ltToken()); Node parameter = this.modifyNode(errorTypeParamsNode.parameter()); Token gtToken = getToken(errorTypeParamsNode.gtToken()); return errorTypeParamsNode.modify() .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withParameter(parameter) .withGtToken(formatToken(gtToken, 0, 0, 0, 0)) .apply(); } @Override public StreamTypeDescriptorNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) { if (!isInLineRange(streamTypeDescriptorNode)) { return streamTypeDescriptorNode; } Token streamKeywordToken = getToken(streamTypeDescriptorNode.streamKeywordToken()); Node streamTypeParamsNode = this.modifyNode(streamTypeDescriptorNode.streamTypeParamsNode().orElse(null)); if (streamTypeParamsNode != null) { streamTypeDescriptorNode = streamTypeDescriptorNode.modify() .withStreamTypeParamsNode(streamTypeParamsNode).apply(); } return streamTypeDescriptorNode.modify() .withStreamKeywordToken(formatToken(streamKeywordToken, 0, 1, 0, 0)) .apply(); } @Override public StreamTypeParamsNode transform(StreamTypeParamsNode streamTypeParamsNode) { if (!isInLineRange(streamTypeParamsNode)) { return streamTypeParamsNode; } Token ltToken = getToken(streamTypeParamsNode.ltToken()); Node leftTypeDescNode = this.modifyNode(streamTypeParamsNode.leftTypeDescNode()); Token commaToken = getToken(streamTypeParamsNode.commaToken().orElse(null)); Node rightTypeDescNode = this.modifyNode(streamTypeParamsNode.rightTypeDescNode().orElse(null)); Token gtToken = 
getToken(streamTypeParamsNode.gtToken()); if (commaToken != null) { streamTypeParamsNode = streamTypeParamsNode.modify() .withCommaToken(formatToken(commaToken, 0, 1, 0, 0)).apply(); } if (rightTypeDescNode != null) { streamTypeParamsNode = streamTypeParamsNode.modify() .withRightTypeDescNode(rightTypeDescNode).apply(); } return streamTypeParamsNode.modify() .withLtToken(formatToken(ltToken, 0, 0, 0, 0)) .withLeftTypeDescNode(leftTypeDescNode) .withGtToken(formatToken(gtToken, 0, 0, 0, 0)) .apply(); } @Override public TypedescTypeDescriptorNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) { if (!isInLineRange(typedescTypeDescriptorNode)) { return typedescTypeDescriptorNode; } Token typedescKeywordToken = this.modifyToken(typedescTypeDescriptorNode.typedescKeywordToken()); TypeParameterNode typedescTypeParamsNode = this.modifyNode(typedescTypeDescriptorNode.typedescTypeParamsNode().orElse(null)); if (typedescTypeParamsNode != null) { typedescTypeDescriptorNode = typedescTypeDescriptorNode.modify() .withTypedescTypeParamsNode(typedescTypeParamsNode).apply(); } return typedescTypeDescriptorNode.modify() .withTypedescKeywordToken(formatToken(typedescKeywordToken, 0, 1, 0, 0)) .apply(); } @Override public LetExpressionNode transform(LetExpressionNode letExpressionNode) { if (!isInLineRange(letExpressionNode)) { return letExpressionNode; } Token letKeyword = getToken(letExpressionNode.letKeyword()); SeparatedNodeList<LetVariableDeclarationNode> letVarDeclarations = this.modifySeparatedNodeList(letExpressionNode.letVarDeclarations()); Token inKeyword = getToken(letExpressionNode.inKeyword()); ExpressionNode expression = this.modifyNode(letExpressionNode.expression()); return letExpressionNode.modify() .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0)) .withLetVarDeclarations(letVarDeclarations) .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public LetVariableDeclarationNode 
transform(LetVariableDeclarationNode letVariableDeclarationNode) { if (!isInLineRange(letVariableDeclarationNode)) { return letVariableDeclarationNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(letVariableDeclarationNode.annotations()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(letVariableDeclarationNode.typedBindingPattern()); Token equalsToken = getToken(letVariableDeclarationNode.equalsToken()); ExpressionNode expression = this.modifyNode(letVariableDeclarationNode.expression()); return letVariableDeclarationNode.modify() .withAnnotations(annotations) .withTypedBindingPattern(typedBindingPattern) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public InterpolationNode transform(InterpolationNode interpolationNode) { if (!isInLineRange(interpolationNode)) { return interpolationNode; } Token interpolationStartToken = getToken(interpolationNode.interpolationStartToken()); ExpressionNode expression = this.modifyNode(interpolationNode.expression()); Token interpolationEndToken = getToken(interpolationNode.interpolationEndToken()); return interpolationNode.modify() .withInterpolationStartToken(formatToken(interpolationStartToken, 0, 0, 0, 0)) .withExpression(expression) .withInterpolationEndToken(formatToken(interpolationEndToken, 0, 0, 0, 0)) .apply(); } @Override public TableTypeDescriptorNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) { if (!isInLineRange(tableTypeDescriptorNode)) { return tableTypeDescriptorNode; } Token tableKeywordToken = getToken(tableTypeDescriptorNode.tableKeywordToken()); Node rowTypeParameterNode = this.modifyNode(tableTypeDescriptorNode.rowTypeParameterNode()); Node keyConstraintNode = this.modifyNode(tableTypeDescriptorNode.keyConstraintNode()); return tableTypeDescriptorNode.modify() .withTableKeywordToken(formatToken(tableKeywordToken, 0, 1, 0, 0)) .withRowTypeParameterNode(rowTypeParameterNode) 
.withKeyConstraintNode(keyConstraintNode) .apply(); } @Override public KeyTypeConstraintNode transform(KeyTypeConstraintNode keyTypeConstraintNode) { if (!isInLineRange(keyTypeConstraintNode)) { return keyTypeConstraintNode; } Token keyKeywordToken = getToken(keyTypeConstraintNode.keyKeywordToken()); Node typeParameterNode = this.modifyNode(keyTypeConstraintNode.typeParameterNode()); return keyTypeConstraintNode.modify() .withKeyKeywordToken(formatToken(keyKeywordToken, 0, 1, 0, 0)) .withTypeParameterNode(typeParameterNode) .apply(); } @Override public FunctionTypeDescriptorNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) { if (!isInLineRange(functionTypeDescriptorNode)) { return functionTypeDescriptorNode; } Token functionKeyword = getToken(functionTypeDescriptorNode.functionKeyword()); FunctionSignatureNode functionSignature = this.modifyNode(functionTypeDescriptorNode.functionSignature()); return functionTypeDescriptorNode.modify() .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0)) .withFunctionSignature(functionSignature) .apply(); } @Override public ExplicitAnonymousFunctionExpressionNode transform( ExplicitAnonymousFunctionExpressionNode explicitAnonymousFunctionExpressionNode) { if (!isInLineRange(explicitAnonymousFunctionExpressionNode)) { return explicitAnonymousFunctionExpressionNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(explicitAnonymousFunctionExpressionNode.annotations()); Token functionKeyword = getToken(explicitAnonymousFunctionExpressionNode.functionKeyword()); FunctionSignatureNode functionSignature = this.modifyNode(explicitAnonymousFunctionExpressionNode.functionSignature()); FunctionBodyNode functionBody = this.modifyNode(explicitAnonymousFunctionExpressionNode.functionBody()); return explicitAnonymousFunctionExpressionNode.modify() .withAnnotations(annotations) .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0)) .withFunctionSignature(functionSignature) 
.withFunctionBody(functionBody) .apply(); } @Override public ExpressionFunctionBodyNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) { if (!isInLineRange(expressionFunctionBodyNode)) { return expressionFunctionBodyNode; } Token rightDoubleArrow = getToken(expressionFunctionBodyNode.rightDoubleArrow()); ExpressionNode expression = this.modifyNode(expressionFunctionBodyNode.expression()); Token semicolon = this.modifyToken(expressionFunctionBodyNode.semicolon().orElse(null)); if (semicolon != null) { expressionFunctionBodyNode = expressionFunctionBodyNode.modify() .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)).apply(); } return expressionFunctionBodyNode.modify() .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public ParenthesisedTypeDescriptorNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) { if (!isInLineRange(parenthesisedTypeDescriptorNode)) { return parenthesisedTypeDescriptorNode; } Token openParenToken = getToken(parenthesisedTypeDescriptorNode.openParenToken()); TypeDescriptorNode typedesc = this.modifyNode(parenthesisedTypeDescriptorNode.typedesc()); Token closeParenToken = getToken(parenthesisedTypeDescriptorNode.closeParenToken()); return parenthesisedTypeDescriptorNode.modify() .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withTypedesc(typedesc) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public ImplicitNewExpressionNode transform(ImplicitNewExpressionNode implicitNewExpressionNode) { if (!isInLineRange(implicitNewExpressionNode)) { return implicitNewExpressionNode; } Token newKeyword = getToken(implicitNewExpressionNode.newKeyword()); ParenthesizedArgList parenthesizedArgList = this.modifyNode(implicitNewExpressionNode.parenthesizedArgList().orElse(null)); if (parenthesizedArgList != null) { implicitNewExpressionNode = implicitNewExpressionNode.modify() 
.withParenthesizedArgList(parenthesizedArgList).apply(); } return implicitNewExpressionNode.modify() .withNewKeyword(formatToken(newKeyword, 0, 1, 0, 0)) .apply(); } @Override public QueryConstructTypeNode transform(QueryConstructTypeNode queryConstructTypeNode) { if (!isInLineRange(queryConstructTypeNode)) { return queryConstructTypeNode; } Token keyword = getToken(queryConstructTypeNode.keyword()); KeySpecifierNode keySpecifier = this.modifyNode(queryConstructTypeNode.keySpecifier().orElse(null)); if (keySpecifier != null) { queryConstructTypeNode = queryConstructTypeNode.modify() .withKeySpecifier(keySpecifier).apply(); } return queryConstructTypeNode.modify() .withKeyword(formatToken(keyword, 0, 0, 0, 0)) .apply(); } @Override public FromClauseNode transform(FromClauseNode fromClauseNode) { if (!isInLineRange(fromClauseNode)) { return fromClauseNode; } Token fromKeyword = getToken(fromClauseNode.fromKeyword()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(fromClauseNode.typedBindingPattern()); Token inKeyword = getToken(fromClauseNode.inKeyword()); ExpressionNode expression = this.modifyNode(fromClauseNode.expression()); return fromClauseNode.modify() .withFromKeyword(formatToken(fromKeyword, 1, 1, 0, 0)) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(formatToken(inKeyword, 0, 0, 0, 0)) .withExpression(expression) .apply(); } @Override public WhereClauseNode transform(WhereClauseNode whereClauseNode) { if (!isInLineRange(whereClauseNode)) { return whereClauseNode; } Token whereKeyword = getToken(whereClauseNode.whereKeyword()); ExpressionNode expression = this.modifyNode(whereClauseNode.expression()); return whereClauseNode.modify() .withWhereKeyword(formatToken(whereKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public LetClauseNode transform(LetClauseNode letClauseNode) { if (!isInLineRange(letClauseNode)) { return letClauseNode; } Token letKeyword = getToken(letClauseNode.letKeyword()); 
SeparatedNodeList<LetVariableDeclarationNode> letVarDeclarations = this.modifySeparatedNodeList(letClauseNode.letVarDeclarations()); return letClauseNode.modify() .withLetKeyword(formatToken(letKeyword, 0, 1, 0, 0)) .withLetVarDeclarations(letVarDeclarations) .apply(); } @Override public QueryPipelineNode transform(QueryPipelineNode queryPipelineNode) { if (!isInLineRange(queryPipelineNode)) { return queryPipelineNode; } FromClauseNode fromClause = this.modifyNode(queryPipelineNode.fromClause()); NodeList<ClauseNode> intermediateClauses = this.modifyNodeList(queryPipelineNode.intermediateClauses()); return queryPipelineNode.modify() .withFromClause(fromClause) .withIntermediateClauses(intermediateClauses) .apply(); } @Override public SelectClauseNode transform(SelectClauseNode selectClauseNode) { if (!isInLineRange(selectClauseNode)) { return selectClauseNode; } Token selectKeyword = getToken(selectClauseNode.selectKeyword()); ExpressionNode expression = this.modifyNode(selectClauseNode.expression()); return selectClauseNode.modify() .withSelectKeyword(formatToken(selectKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public QueryExpressionNode transform(QueryExpressionNode queryExpressionNode) { if (!isInLineRange(queryExpressionNode)) { return queryExpressionNode; } QueryConstructTypeNode queryConstructType = this.modifyNode(queryExpressionNode.queryConstructType().orElse(null)); QueryPipelineNode queryPipeline = this.modifyNode(queryExpressionNode.queryPipeline()); SelectClauseNode selectClause = this.modifyNode(queryExpressionNode.selectClause()); OnConflictClauseNode onConflictClause = this.modifyNode(queryExpressionNode.onConflictClause().orElse(null)); LimitClauseNode limitClause = this.modifyNode(queryExpressionNode.limitClause().orElse(null)); if (queryConstructType != null) { queryExpressionNode = queryExpressionNode.modify() .withQueryConstructType(queryConstructType).apply(); } if (onConflictClause != null) { queryExpressionNode = 
queryExpressionNode.modify() .withOnConflictClause(onConflictClause).apply(); } if (limitClause != null) { queryExpressionNode = queryExpressionNode.modify() .withLimitClause(limitClause).apply(); } return queryExpressionNode.modify() .withQueryPipeline(queryPipeline) .withSelectClause(selectClause) .apply(); } @Override public IntersectionTypeDescriptorNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) { if (!isInLineRange(intersectionTypeDescriptorNode)) { return intersectionTypeDescriptorNode; } Node leftTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.leftTypeDesc()); Token bitwiseAndToken = getToken(intersectionTypeDescriptorNode.bitwiseAndToken()); Node rightTypeDesc = this.modifyNode(intersectionTypeDescriptorNode.rightTypeDesc()); return intersectionTypeDescriptorNode.modify() .withLeftTypeDesc(leftTypeDesc) .withBitwiseAndToken(formatToken(bitwiseAndToken, 1, 1, 0, 0)) .withRightTypeDesc(rightTypeDesc) .apply(); } @Override public ImplicitAnonymousFunctionParameters transform( ImplicitAnonymousFunctionParameters implicitAnonymousFunctionParameters) { if (!isInLineRange(implicitAnonymousFunctionParameters)) { return implicitAnonymousFunctionParameters; } Token openParenToken = getToken(implicitAnonymousFunctionParameters.openParenToken()); SeparatedNodeList<SimpleNameReferenceNode> parameters = this.modifySeparatedNodeList(implicitAnonymousFunctionParameters.parameters()); Token closeParenToken = getToken(implicitAnonymousFunctionParameters.closeParenToken()); return implicitAnonymousFunctionParameters.modify() .withOpenParenToken(formatToken(openParenToken, 0, 0, 0, 0)) .withParameters(parameters) .withCloseParenToken(formatToken(closeParenToken, 0, 0, 0, 0)) .apply(); } @Override public ImplicitAnonymousFunctionExpressionNode transform( ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) { if (!isInLineRange(implicitAnonymousFunctionExpressionNode)) { return 
implicitAnonymousFunctionExpressionNode; } Node params = this.modifyNode(implicitAnonymousFunctionExpressionNode.params()); Token rightDoubleArrow = getToken(implicitAnonymousFunctionExpressionNode.rightDoubleArrow()); ExpressionNode expression = this.modifyNode(implicitAnonymousFunctionExpressionNode.expression()); return implicitAnonymousFunctionExpressionNode.modify() .withParams(params) .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public SingletonTypeDescriptorNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) { if (!isInLineRange(singletonTypeDescriptorNode)) { return singletonTypeDescriptorNode; } ExpressionNode simpleContExprNode = this.modifyNode(singletonTypeDescriptorNode.simpleContExprNode()); return singletonTypeDescriptorNode.modify() .withSimpleContExprNode(simpleContExprNode) .apply(); } @Override public MethodDeclarationNode transform(MethodDeclarationNode methodDeclarationNode) { if (!isInLineRange(methodDeclarationNode)) { return methodDeclarationNode; } MetadataNode metadata = this.modifyNode(methodDeclarationNode.metadata().orElse(null)); NodeList<Token> qualifierList = this.modifyNodeList(methodDeclarationNode.qualifierList()); Token functionKeyword = getToken(methodDeclarationNode.functionKeyword()); IdentifierToken methodName = this.modifyNode(methodDeclarationNode.methodName()); FunctionSignatureNode methodSignature = this.modifyNode(methodDeclarationNode.methodSignature()); Token semicolon = getToken(methodDeclarationNode.semicolon()); if (metadata != null) { methodDeclarationNode = methodDeclarationNode.modify() .withMetadata(metadata).apply(); } return methodDeclarationNode.modify() .withQualifierList(qualifierList) .withFunctionKeyword(formatToken(functionKeyword, 0, 1, 0, 0)) .withMethodName(methodName) .withMethodSignature(methodSignature) .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)) .apply(); } @Override public WildcardBindingPatternNode 
transform(WildcardBindingPatternNode wildcardBindingPatternNode) { if (!isInLineRange(wildcardBindingPatternNode)) { return wildcardBindingPatternNode; } Token underscoreToken = getToken(wildcardBindingPatternNode.underscoreToken()); return wildcardBindingPatternNode.modify() .withUnderscoreToken(formatToken(underscoreToken, 0, 0, 0, 0)) .apply(); } @Override public ErrorBindingPatternNode transform(ErrorBindingPatternNode errorBindingPatternNode) { if (!isInLineRange(errorBindingPatternNode)) { return errorBindingPatternNode; } Token errorKeyword = getToken(errorBindingPatternNode.errorKeyword()); Node typeReference = this.modifyNode(errorBindingPatternNode.typeReference().orElse(null)); Token openParenthesis = getToken(errorBindingPatternNode.openParenthesis()); SeparatedNodeList<BindingPatternNode> argListBindingPatterns = this.modifySeparatedNodeList(errorBindingPatternNode.argListBindingPatterns()); Token closeParenthesis = getToken(errorBindingPatternNode.closeParenthesis()); return errorBindingPatternNode.modify() .withErrorKeyword(formatToken(errorKeyword, 0, 1, 0, 0)) .withTypeReference(typeReference) .withOpenParenthesis(formatToken(openParenthesis, 0, 0, 0, 0)) .withArgListBindingPatterns(argListBindingPatterns) .withCloseParenthesis(formatToken(closeParenthesis, 0, 0, 0, 0)) .apply(); } @Override public NamedArgBindingPatternNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) { if (!isInLineRange(namedArgBindingPatternNode)) { return namedArgBindingPatternNode; } IdentifierToken argName = this.modifyNode(namedArgBindingPatternNode.argName()); Token equalsToken = getToken(namedArgBindingPatternNode.equalsToken()); BindingPatternNode bindingPattern = this.modifyNode(namedArgBindingPatternNode.bindingPattern()); return namedArgBindingPatternNode.modify() .withArgName(argName) .withEqualsToken(formatToken(equalsToken, 1, 1, 0, 0)) .withBindingPattern(bindingPattern) .apply(); } @Override public AsyncSendActionNode 
transform(AsyncSendActionNode asyncSendActionNode) { if (!isInLineRange(asyncSendActionNode)) { return asyncSendActionNode; } ExpressionNode expression = this.modifyNode(asyncSendActionNode.expression()); Token rightArrowToken = getToken(asyncSendActionNode.rightArrowToken()); SimpleNameReferenceNode peerWorker = this.modifyNode(asyncSendActionNode.peerWorker()); return asyncSendActionNode.modify() .withExpression(expression) .withRightArrowToken(formatToken(rightArrowToken, 1, 1, 0, 0)) .withPeerWorker(peerWorker) .apply(); } @Override public SyncSendActionNode transform(SyncSendActionNode syncSendActionNode) { if (!isInLineRange(syncSendActionNode)) { return syncSendActionNode; } ExpressionNode expression = this.modifyNode(syncSendActionNode.expression()); Token syncSendToken = getToken(syncSendActionNode.syncSendToken()); SimpleNameReferenceNode peerWorker = this.modifyNode(syncSendActionNode.peerWorker()); return syncSendActionNode.modify() .withExpression(expression) .withSyncSendToken(formatToken(syncSendToken, 1, 1, 0, 0)) .withPeerWorker(peerWorker) .apply(); } @Override public ReceiveActionNode transform(ReceiveActionNode receiveActionNode) { if (!isInLineRange(receiveActionNode)) { return receiveActionNode; } Token leftArrow = getToken(receiveActionNode.leftArrow()); SimpleNameReferenceNode receiveWorkers = this.modifyNode(receiveActionNode.receiveWorkers()); return receiveActionNode.modify() .withLeftArrow(formatToken(leftArrow, 1, 1, 0, 0)) .withReceiveWorkers(receiveWorkers) .apply(); } @Override public ReceiveFieldsNode transform(ReceiveFieldsNode receiveFieldsNode) { if (!isInLineRange(receiveFieldsNode)) { return receiveFieldsNode; } Token openBrace = getToken(receiveFieldsNode.openBrace()); SeparatedNodeList<NameReferenceNode> receiveFields = this.modifySeparatedNodeList(receiveFieldsNode.receiveFields()); Token closeBrace = getToken(receiveFieldsNode.closeBrace()); return receiveFieldsNode.modify() .withOpenBrace(formatToken(openBrace, 0, 0, 0, 
0)) .withReceiveFields(receiveFields) .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0)) .apply(); } @Override public RestDescriptorNode transform(RestDescriptorNode restDescriptorNode) { if (!isInLineRange(restDescriptorNode)) { return restDescriptorNode; } TypeDescriptorNode typeDescriptor = this.modifyNode(restDescriptorNode.typeDescriptor()); Token ellipsisToken = getToken(restDescriptorNode.ellipsisToken()); return restDescriptorNode.modify() .withTypeDescriptor(typeDescriptor) .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0)) .apply(); } @Override public DoubleGTTokenNode transform(DoubleGTTokenNode doubleGTTokenNode) { if (!isInLineRange(doubleGTTokenNode)) { return doubleGTTokenNode; } Token openGTToken = getToken(doubleGTTokenNode.openGTToken()); Token endGTToken = getToken(doubleGTTokenNode.endGTToken()); return doubleGTTokenNode.modify() .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0)) .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0)) .apply(); } @Override public TrippleGTTokenNode transform(TrippleGTTokenNode trippleGTTokenNode) { if (!isInLineRange(trippleGTTokenNode)) { return trippleGTTokenNode; } Token openGTToken = getToken(trippleGTTokenNode.openGTToken()); Token middleGTToken = getToken(trippleGTTokenNode.middleGTToken()); Token endGTToken = getToken(trippleGTTokenNode.endGTToken()); return trippleGTTokenNode.modify() .withOpenGTToken(formatToken(openGTToken, 0, 0, 0, 0)) .withMiddleGTToken(formatToken(middleGTToken, 0, 0, 0, 0)) .withEndGTToken(formatToken(endGTToken, 0, 0, 0, 0)) .apply(); } @Override public WaitActionNode transform(WaitActionNode waitActionNode) { if (!isInLineRange(waitActionNode)) { return waitActionNode; } Token waitKeyword = getToken(waitActionNode.waitKeyword()); Node waitFutureExpr = this.modifyNode(waitActionNode.waitFutureExpr()); return waitActionNode.modify() .withWaitKeyword(formatToken(waitKeyword, 1, 1, 0, 0)) .withWaitFutureExpr(waitFutureExpr) .apply(); } @Override public WaitFieldsListNode 
transform(WaitFieldsListNode waitFieldsListNode) { if (!isInLineRange(waitFieldsListNode)) { return waitFieldsListNode; } Token openBrace = getToken(waitFieldsListNode.openBrace()); SeparatedNodeList<Node> waitFields = this.modifySeparatedNodeList(waitFieldsListNode.waitFields()); Token closeBrace = getToken(waitFieldsListNode.closeBrace()); return waitFieldsListNode.modify() .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0)) .withWaitFields(waitFields) .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0)) .apply(); } @Override public WaitFieldNode transform(WaitFieldNode waitFieldNode) { if (!isInLineRange(waitFieldNode)) { return waitFieldNode; } SimpleNameReferenceNode fieldName = this.modifyNode(waitFieldNode.fieldName()); Token colon = getToken(waitFieldNode.colon()); ExpressionNode waitFutureExpr = this.modifyNode(waitFieldNode.waitFutureExpr()); return waitFieldNode.modify() .withFieldName(fieldName) .withColon(formatToken(colon, 1, 1, 0, 0)) .withWaitFutureExpr(waitFutureExpr) .apply(); } @Override public AnnotAccessExpressionNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) { if (!isInLineRange(annotAccessExpressionNode)) { return annotAccessExpressionNode; } ExpressionNode expression = this.modifyNode(annotAccessExpressionNode.expression()); Token annotChainingToken = getToken(annotAccessExpressionNode.annotChainingToken()); NameReferenceNode annotTagReference = this.modifyNode(annotAccessExpressionNode.annotTagReference()); return annotAccessExpressionNode.modify() .withExpression(expression) .withAnnotChainingToken(formatToken(annotChainingToken, 0, 0, 0, 0)) .withAnnotTagReference(annotTagReference) .apply(); } @Override public QueryActionNode transform(QueryActionNode queryActionNode) { if (!isInLineRange(queryActionNode)) { return queryActionNode; } QueryPipelineNode queryPipeline = this.modifyNode(queryActionNode.queryPipeline()); Token doKeyword = getToken(queryActionNode.doKeyword()); BlockStatementNode blockStatement = 
this.modifyNode(queryActionNode.blockStatement()); LimitClauseNode limitClause = this.modifyNode(queryActionNode.limitClause().orElse(null)); if (limitClause != null) { queryActionNode = queryActionNode.modify() .withLimitClause(limitClause).apply(); } return queryActionNode.modify() .withQueryPipeline(queryPipeline) .withDoKeyword(formatToken(doKeyword, 1, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public OptionalFieldAccessExpressionNode transform( OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) { if (!isInLineRange(optionalFieldAccessExpressionNode)) { return optionalFieldAccessExpressionNode; } ExpressionNode expression = this.modifyNode(optionalFieldAccessExpressionNode.expression()); Token optionalChainingToken = getToken(optionalFieldAccessExpressionNode.optionalChainingToken()); NameReferenceNode fieldName = this.modifyNode(optionalFieldAccessExpressionNode.fieldName()); return optionalFieldAccessExpressionNode.modify() .withExpression(expression) .withOptionalChainingToken(formatToken(optionalChainingToken, 0, 0, 0, 0)) .withFieldName(fieldName) .apply(); } @Override public ConditionalExpressionNode transform(ConditionalExpressionNode conditionalExpressionNode) { if (!isInLineRange(conditionalExpressionNode)) { return conditionalExpressionNode; } ExpressionNode lhsExpression = this.modifyNode(conditionalExpressionNode.lhsExpression()); Token questionMarkToken = getToken(conditionalExpressionNode.questionMarkToken()); ExpressionNode middleExpression = this.modifyNode(conditionalExpressionNode.middleExpression()); Token colonToken = getToken(conditionalExpressionNode.colonToken()); ExpressionNode endExpression = this.modifyNode(conditionalExpressionNode.endExpression()); return conditionalExpressionNode.modify() .withLhsExpression(lhsExpression) .withQuestionMarkToken(formatToken(questionMarkToken, 1, 1, 0, 0)) .withMiddleExpression(middleExpression) .withColonToken(formatToken(colonToken, 1, 1, 0, 0)) 
.withEndExpression(endExpression) .apply(); } @Override public EnumDeclarationNode transform(EnumDeclarationNode enumDeclarationNode) { if (!isInLineRange(enumDeclarationNode)) { return enumDeclarationNode; } MetadataNode metadata = this.modifyNode(enumDeclarationNode.metadata().orElse(null)); Token qualifier = getToken(enumDeclarationNode.qualifier()); Token enumKeywordToken = getToken(enumDeclarationNode.enumKeywordToken()); IdentifierToken identifier = this.modifyNode(enumDeclarationNode.identifier()); Token openBraceToken = getToken(enumDeclarationNode.openBraceToken()); SeparatedNodeList<Node> enumMemberList = this.modifySeparatedNodeList(enumDeclarationNode.enumMemberList()); Token closeBraceToken = getToken(enumDeclarationNode.closeBraceToken()); if (metadata != null) { enumDeclarationNode = enumDeclarationNode.modify() .withMetadata(metadata).apply(); } return enumDeclarationNode.modify() .withQualifier(formatToken(qualifier, 1, 1, 0, 0)) .withEnumKeywordToken(formatToken(enumKeywordToken, 0, 1, 0, 0)) .withIdentifier(identifier) .withOpenBraceToken(formatToken(openBraceToken, 0, 0, 0, 0)) .withEnumMemberList(enumMemberList) .withCloseBraceToken(formatToken(closeBraceToken, 0, 0, 0, 0)) .apply(); } @Override public EnumMemberNode transform(EnumMemberNode enumMemberNode) { if (!isInLineRange(enumMemberNode)) { return enumMemberNode; } MetadataNode metadata = this.modifyNode(enumMemberNode.metadata().orElse(null)); IdentifierToken identifier = this.modifyNode(enumMemberNode.identifier()); Token equalToken = getToken(enumMemberNode.equalToken().orElse(null)); ExpressionNode constExprNode = this.modifyNode(enumMemberNode.constExprNode().orElse(null)); if (metadata != null) { enumMemberNode = enumMemberNode.modify() .withMetadata(metadata).apply(); } return enumMemberNode.modify() .withEqualToken(formatToken(equalToken, 1, 1, 0, 0)) .withIdentifier(identifier) .withConstExprNode(constExprNode) .apply(); } @Override public TransactionStatementNode 
transform(TransactionStatementNode transactionStatementNode) { if (!isInLineRange(transactionStatementNode)) { return transactionStatementNode; } Token transactionKeyword = getToken(transactionStatementNode.transactionKeyword()); BlockStatementNode blockStatement = this.modifyNode(transactionStatementNode.blockStatement()); return transactionStatementNode.modify() .withTransactionKeyword(formatToken(transactionKeyword, 1, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public RollbackStatementNode transform(RollbackStatementNode rollbackStatementNode) { if (!isInLineRange(rollbackStatementNode)) { return rollbackStatementNode; } Token rollbackKeyword = getToken(rollbackStatementNode.rollbackKeyword()); ExpressionNode expression = this.modifyNode(rollbackStatementNode.expression().orElse(null)); Token semicolon = getToken(rollbackStatementNode.semicolon()); if (expression != null) { rollbackStatementNode = rollbackStatementNode.modify() .withExpression(expression).apply(); } return rollbackStatementNode.modify() .withRollbackKeyword(formatToken(rollbackKeyword, 1, 1, 0, 0)) .withSemicolon(formatToken(semicolon, 0, 0, 0, 0)) .apply(); } @Override public RetryStatementNode transform(RetryStatementNode retryStatementNode) { if (!isInLineRange(retryStatementNode)) { return retryStatementNode; } Token retryKeyword = getToken(retryStatementNode.retryKeyword()); TypeParameterNode typeParameter = this.modifyNode(retryStatementNode.typeParameter().orElse(null)); ParenthesizedArgList arguments = this.modifyNode(retryStatementNode.arguments().orElse(null)); StatementNode retryBody = this.modifyNode(retryStatementNode.retryBody()); if (typeParameter != null) { retryStatementNode = retryStatementNode.modify() .withTypeParameter(typeParameter).apply(); } if (arguments != null) { retryStatementNode = retryStatementNode.modify() .withArguments(arguments).apply(); } return retryStatementNode.modify() .withRetryKeyword(formatToken(retryKeyword, 1, 1, 0, 0)) 
.withRetryBody(retryBody) .apply(); } @Override public CommitActionNode transform(CommitActionNode commitActionNode) { if (!isInLineRange(commitActionNode)) { return commitActionNode; } Token commitKeyword = getToken(commitActionNode.commitKeyword()); return commitActionNode.modify() .withCommitKeyword(formatToken(commitKeyword, 1, 1, 0, 0)) .apply(); } @Override public TransactionalExpressionNode transform(TransactionalExpressionNode transactionalExpressionNode) { if (!isInLineRange(transactionalExpressionNode)) { return transactionalExpressionNode; } Token transactionalKeyword = getToken(transactionalExpressionNode.transactionalKeyword()); return transactionalExpressionNode.modify() .withTransactionalKeyword(formatToken(transactionalKeyword, 1, 1, 0, 0)) .apply(); } @Override public ServiceConstructorExpressionNode transform( ServiceConstructorExpressionNode serviceConstructorExpressionNode) { if (!isInLineRange(serviceConstructorExpressionNode)) { return serviceConstructorExpressionNode; } NodeList<AnnotationNode> annotations = this.modifyNodeList(serviceConstructorExpressionNode.annotations()); Token serviceKeyword = getToken(serviceConstructorExpressionNode.serviceKeyword()); Node serviceBody = this.modifyNode(serviceConstructorExpressionNode.serviceBody()); return serviceConstructorExpressionNode.modify() .withAnnotations(annotations) .withServiceKeyword(formatToken(serviceKeyword, 1, 1, 0, 0)) .withServiceBody(serviceBody) .apply(); } @Override public TypeReferenceTypeDescNode transform(TypeReferenceTypeDescNode typeReferenceTypeDescNode) { if (!isInLineRange(typeReferenceTypeDescNode)) { return typeReferenceTypeDescNode; } NameReferenceNode typeRef = this.modifyNode(typeReferenceTypeDescNode.typeRef()); return typeReferenceTypeDescNode.modify() .withTypeRef(typeRef) .apply(); } @Override public MatchStatementNode transform(MatchStatementNode matchStatementNode) { if (!isInLineRange(matchStatementNode)) { return matchStatementNode; } Token matchKeyword = 
getToken(matchStatementNode.matchKeyword()); ExpressionNode condition = this.modifyNode(matchStatementNode.condition()); Token openBrace = getToken(matchStatementNode.openBrace()); NodeList<MatchClauseNode> matchClauses = this.modifyNodeList(matchStatementNode.matchClauses()); Token closeBrace = getToken(matchStatementNode.closeBrace()); return matchStatementNode.modify() .withMatchKeyword(formatToken(matchKeyword, 1, 1, 0, 0)) .withCondition(condition) .withOpenBrace(formatToken(openBrace, 0, 0, 0, 0)) .withMatchClauses(matchClauses) .withCloseBrace(formatToken(closeBrace, 0, 0, 0, 0)) .apply(); } @Override public MatchClauseNode transform(MatchClauseNode matchClauseNode) { if (!isInLineRange(matchClauseNode)) { return matchClauseNode; } SeparatedNodeList<Node> matchPatterns = this.modifySeparatedNodeList(matchClauseNode.matchPatterns()); MatchGuardNode matchGuard = this.modifyNode(matchClauseNode.matchGuard().orElse(null)); Token rightDoubleArrow = getToken(matchClauseNode.rightDoubleArrow()); BlockStatementNode blockStatement = this.modifyNode(matchClauseNode.blockStatement()); if (matchGuard != null) { matchClauseNode = matchClauseNode.modify() .withMatchGuard(matchGuard).apply(); } return matchClauseNode.modify() .withMatchPatterns(matchPatterns) .withRightDoubleArrow(formatToken(rightDoubleArrow, 1, 1, 0, 0)) .withBlockStatement(blockStatement) .apply(); } @Override public MatchGuardNode transform(MatchGuardNode matchGuardNode) { if (!isInLineRange(matchGuardNode)) { return matchGuardNode; } Token ifKeyword = getToken(matchGuardNode.ifKeyword()); ExpressionNode expression = this.modifyNode(matchGuardNode.expression()); return matchGuardNode.modify() .withIfKeyword(formatToken(ifKeyword, 0, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public DistinctTypeDescriptorNode transform(DistinctTypeDescriptorNode distinctTypeDescriptorNode) { if (!isInLineRange(distinctTypeDescriptorNode)) { return distinctTypeDescriptorNode; } Token distinctKeyword = 
getToken(distinctTypeDescriptorNode.distinctKeyword()); TypeDescriptorNode typeDescriptor = this.modifyNode(distinctTypeDescriptorNode.typeDescriptor()); return distinctTypeDescriptorNode.modify() .withDistinctKeyword(formatToken(distinctKeyword, 1, 1, 0, 0)) .withTypeDescriptor(typeDescriptor) .apply(); } @Override public OnConflictClauseNode transform(OnConflictClauseNode onConflictClauseNode) { if (!isInLineRange(onConflictClauseNode)) { return onConflictClauseNode; } Token onKeyword = getToken(onConflictClauseNode.onKeyword()); Token conflictKeyword = getToken(onConflictClauseNode.conflictKeyword()); ExpressionNode expression = this.modifyNode(onConflictClauseNode.expression()); return onConflictClauseNode.modify() .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0)) .withConflictKeyword(formatToken(conflictKeyword, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public LimitClauseNode transform(LimitClauseNode limitClauseNode) { if (!isInLineRange(limitClauseNode)) { return limitClauseNode; } Token limitKeyword = getToken(limitClauseNode.limitKeyword()); ExpressionNode expression = this.modifyNode(limitClauseNode.expression()); return limitClauseNode.modify() .withLimitKeyword(formatToken(limitKeyword, 1, 1, 0, 0)) .withExpression(expression) .apply(); } @Override public JoinClauseNode transform(JoinClauseNode joinClauseNode) { if (!isInLineRange(joinClauseNode)) { return joinClauseNode; } Token outerKeyword = getToken(joinClauseNode.outerKeyword().orElse(null)); Token joinKeyword = getToken(joinClauseNode.joinKeyword()); TypedBindingPatternNode typedBindingPattern = this.modifyNode(joinClauseNode.typedBindingPattern()); Token inKeyword = getToken(joinClauseNode.inKeyword()); ExpressionNode expression = this.modifyNode(joinClauseNode.expression()); OnClauseNode joinOnCondition = this.modifyNode(joinClauseNode.joinOnCondition()); if (outerKeyword != null) { joinClauseNode = joinClauseNode.modify() .withOuterKeyword(formatToken(outerKeyword, 1, 1, 
0, 0)).apply(); } return joinClauseNode.modify() .withJoinKeyword(formatToken(joinKeyword, 1, 1, 0, 0)) .withTypedBindingPattern(typedBindingPattern) .withInKeyword(formatToken(inKeyword, 1, 1, 0, 0)) .withExpression(expression) .withJoinOnCondition(joinOnCondition) .apply(); } @Override public OnClauseNode transform(OnClauseNode onClauseNode) { if (!isInLineRange(onClauseNode)) { return onClauseNode; } Token onKeyword = getToken(onClauseNode.onKeyword()); Token equalsKeyword = getToken(onClauseNode.equalsKeyword()); ExpressionNode lhsExpr = this.modifyNode(onClauseNode.lhsExpression()); ExpressionNode rhsExpr = this.modifyNode(onClauseNode.rhsExpression()); return onClauseNode.modify() .withOnKeyword(formatToken(onKeyword, 1, 1, 0, 0)) .withLhsExpression(lhsExpr) .withEqualsKeyword(formatToken(equalsKeyword, 1, 1, 0, 0)) .withRhsExpression(rhsExpr) .apply(); } @Override public ListMatchPatternNode transform(ListMatchPatternNode listMatchPatternNode) { if (!isInLineRange(listMatchPatternNode)) { return listMatchPatternNode; } Token openBracket = getToken(listMatchPatternNode.openBracket()); SeparatedNodeList<Node> matchPatterns = this.modifySeparatedNodeList(listMatchPatternNode.matchPatterns()); RestMatchPatternNode restMatchPattern = this.modifyNode(listMatchPatternNode.restMatchPattern().orElse(null)); Token closeBracket = getToken(listMatchPatternNode.closeBracket()); return listMatchPatternNode.modify() .withOpenBracket(formatToken(openBracket, 0, 0, 0, 0)) .withMatchPatterns(matchPatterns) .withRestMatchPattern(restMatchPattern) .withCloseBracket(formatToken(closeBracket, 0, 0, 0, 0)) .apply(); } @Override public RestMatchPatternNode transform(RestMatchPatternNode restMatchPatternNode) { if (!isInLineRange(restMatchPatternNode)) { return restMatchPatternNode; } Token ellipsisToken = getToken(restMatchPatternNode.ellipsisToken()); Token varKeywordToken = getToken(restMatchPatternNode.varKeywordToken()); SimpleNameReferenceNode variableName = 
this.modifyNode(restMatchPatternNode.variableName()); return restMatchPatternNode.modify() .withEllipsisToken(formatToken(ellipsisToken, 0, 0, 0, 0)) .withVarKeywordToken(formatToken(varKeywordToken, 1, 1, 0, 0)) .withVariableName(variableName) .apply(); } @Override public FieldMatchPatternNode transform(FieldMatchPatternNode fieldMatchPatternNode) { if (!isInLineRange(fieldMatchPatternNode)) { return fieldMatchPatternNode; } SimpleNameReferenceNode fieldNameNode = this.modifyNode(fieldMatchPatternNode.fieldNameNode()); Token colonToken = getToken(fieldMatchPatternNode.colonToken()); Node matchPattern = this.modifyNode(fieldMatchPatternNode.matchPattern()); return fieldMatchPatternNode.modify() .withFieldNameNode(fieldNameNode) .withColonToken(formatToken(colonToken, 1, 1, 0, 0)) .withMatchPattern(matchPattern) .apply(); } @Override public FunctionalMatchPatternNode transform(FunctionalMatchPatternNode functionalMatchPatternNode) { if (!isInLineRange(functionalMatchPatternNode)) { return functionalMatchPatternNode; } Node typeRef = this.modifyNode(functionalMatchPatternNode.typeRef()); Token openParenthesisToken = getToken(functionalMatchPatternNode.openParenthesisToken()); SeparatedNodeList<Node> argListMatchPatternNode = this.modifySeparatedNodeList(functionalMatchPatternNode.argListMatchPatternNode()); Token closeParenthesisToken = getToken(functionalMatchPatternNode.closeParenthesisToken()); return functionalMatchPatternNode.modify() .withTypeRef(typeRef) .withOpenParenthesisToken(formatToken(openParenthesisToken, 0, 0, 0, 0)) .withArgListMatchPatternNode(argListMatchPatternNode) .withCloseParenthesisToken(formatToken(closeParenthesisToken, 0, 0, 0, 0)) .apply(); } @Override public NamedArgMatchPatternNode transform(NamedArgMatchPatternNode namedArgMatchPatternNode) { if (!isInLineRange(namedArgMatchPatternNode)) { return namedArgMatchPatternNode; } IdentifierToken identifier = this.modifyNode(namedArgMatchPatternNode.identifier()); Token equalToken = 
getToken(namedArgMatchPatternNode.equalToken()); Node matchPattern = this.modifyNode(namedArgMatchPatternNode.matchPattern()); return namedArgMatchPatternNode.modify() .withIdentifier(identifier) .withEqualToken(formatToken(equalToken, 1, 1, 0, 0)) .withMatchPattern(matchPattern) .apply(); } @Override public MarkdownDocumentationNode transform(MarkdownDocumentationNode markdownDocumentationNode) { if (!isInLineRange(markdownDocumentationNode)) { return markdownDocumentationNode; } NodeList<Node> documentationLines = this.modifyNodeList(markdownDocumentationNode.documentationLines()); return markdownDocumentationNode.modify() .withDocumentationLines(documentationLines) .apply(); } @Override public MarkdownDocumentationLineNode transform(MarkdownDocumentationLineNode markdownDocumentationLineNode) { if (!isInLineRange(markdownDocumentationLineNode)) { return markdownDocumentationLineNode; } Token hashToken = getToken(markdownDocumentationLineNode.hashToken()); NodeList<Node> documentElements = this.modifyNodeList(markdownDocumentationLineNode.documentElements()); return markdownDocumentationLineNode.modify() .withDocumentElements(documentElements) .withHashToken(formatToken(hashToken, 1, 1, 0, 0)) .apply(); } @Override public MarkdownParameterDocumentationLineNode transform( MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) { if (!isInLineRange(markdownParameterDocumentationLineNode)) { return markdownParameterDocumentationLineNode; } Token hashToken = getToken(markdownParameterDocumentationLineNode.hashToken()); Token plusToken = getToken(markdownParameterDocumentationLineNode.plusToken()); Token parameterName = getToken(markdownParameterDocumentationLineNode.parameterName()); Token minusToken = getToken(markdownParameterDocumentationLineNode.minusToken()); NodeList<Node> documentElements = this.modifyNodeList(markdownParameterDocumentationLineNode.documentElements()); return markdownParameterDocumentationLineNode.modify() 
.withHashToken(formatToken(hashToken, 1, 1, 0, 0)) .withPlusToken(formatToken(plusToken, 1, 1, 0, 0)) .withParameterName(formatToken(parameterName, 1, 1, 0, 0)) .withMinusToken(formatToken(minusToken, 1, 1, 0, 0)) .withDocumentElements(documentElements) .apply(); } @Override public DocumentationReferenceNode transform(DocumentationReferenceNode documentationReferenceNode) { if (!isInLineRange(documentationReferenceNode)) { return documentationReferenceNode; } Token referenceType = getToken(documentationReferenceNode.referenceType().orElse(null)); Token startBacktick = getToken(documentationReferenceNode.startBacktick()); Node backtickContent = this.modifyNode(documentationReferenceNode.backtickContent()); Token endBacktick = getToken(documentationReferenceNode.endBacktick()); if (referenceType != null) { documentationReferenceNode = documentationReferenceNode.modify() .withReferenceType(referenceType).apply(); } return documentationReferenceNode.modify() .withStartBacktick(formatToken(startBacktick, 0, 0, 0, 0)) .withBacktickContent(backtickContent) .withEndBacktick(formatToken(endBacktick, 0, 0, 0, 0)) .apply(); } @Override public OrderByClauseNode transform(OrderByClauseNode orderByClauseNode) { if (!isInLineRange(orderByClauseNode)) { return orderByClauseNode; } Token orderKeyword = getToken(orderByClauseNode.orderKeyword()); Token byKeyword = getToken(orderByClauseNode.byKeyword()); SeparatedNodeList<OrderKeyNode> orderKey = this.modifySeparatedNodeList(orderByClauseNode.orderKey()); return orderByClauseNode.modify() .withOrderKeyword(formatToken(orderKeyword, 1, 1, 0, 0)) .withByKeyword(formatToken(byKeyword, 1, 1, 0, 0)) .withOrderKey(orderKey) .apply(); } @Override public OrderKeyNode transform(OrderKeyNode orderKeyNode) { if (!isInLineRange(orderKeyNode)) { return orderKeyNode; } ExpressionNode expression = this.modifyNode(orderKeyNode.expression()); Token orderDirection = getToken(orderKeyNode.orderDirection().orElse(null)); if (orderDirection != null) { 
orderKeyNode = orderKeyNode.modify() .withOrderDirection(formatToken(orderDirection, 1, 1, 0, 0)).apply(); } return orderKeyNode.modify() .withExpression(expression) .apply(); } /** * Update the minutiae and return the token. * * @param token token * @param leadingSpaces leading spaces * @param trailingSpaces trailing spaces * @param leadingNewLines leading new lines * @param trailingNewLines trailing new lines * @return updated token */ private Token formatToken(Token token, int leadingSpaces, int trailingSpaces, int leadingNewLines, int trailingNewLines) { if (token == null) { return token; } MinutiaeList leadingMinutiaeList = token.leadingMinutiae(); MinutiaeList trailingMinutiaeList = token.trailingMinutiae(); MinutiaeList newLeadingMinutiaeList = modifyMinutiaeList(leadingMinutiaeList, leadingSpaces, leadingNewLines); MinutiaeList newTrailingMinutiaeList = modifyMinutiaeList(trailingMinutiaeList, trailingSpaces, trailingNewLines); return token.modify(newLeadingMinutiaeList, newTrailingMinutiaeList); } private MinutiaeList modifyMinutiaeList(MinutiaeList minutiaeList, int spaces, int newLines) { Minutiae minutiae = NodeFactory.createWhitespaceMinutiae(getWhiteSpaces(spaces, newLines)); return minutiaeList.add(minutiae); } private String getWhiteSpaces(int column, int newLines) { StringBuilder whiteSpaces = new StringBuilder(); for (int i = 0; i <= (newLines - 1); i++) { whiteSpaces.append(System.getProperty("line.separator")); } for (int i = 0; i <= (column - 1); i++) { whiteSpaces.append(" "); } return whiteSpaces.toString(); } /** * Initialize the token with empty minutiae lists. 
* * @param node node * @return token with empty minutiae */ private <T extends Token> Token getToken(T node) { if (node == null) { return node; } MinutiaeList leadingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList(); MinutiaeList trailingMinutiaeList = AbstractNodeFactory.createEmptyMinutiaeList(); if (node.containsLeadingMinutiae()) { leadingMinutiaeList = getCommentMinutiae(node.leadingMinutiae(), true); } if (node.containsTrailingMinutiae()) { trailingMinutiaeList = getCommentMinutiae(node.trailingMinutiae(), false); } return node.modify(leadingMinutiaeList, trailingMinutiaeList); } private MinutiaeList getCommentMinutiae(MinutiaeList minutiaeList, boolean isLeading) { MinutiaeList minutiaes = AbstractNodeFactory.createEmptyMinutiaeList(); for (int i = 0; i < minutiaeList.size(); i++) { if (minutiaeList.get(i).kind().equals(SyntaxKind.COMMENT_MINUTIAE)) { if (i > 0) { minutiaes = minutiaes.add(minutiaeList.get(i - 1)); } minutiaes = minutiaes.add(minutiaeList.get(i)); if ((i + 1) < minutiaeList.size() && isLeading) { minutiaes = minutiaes.add(minutiaeList.get(i + 1)); } } } return minutiaes; } private <T extends Node> Node getParent(T node, SyntaxKind syntaxKind) { Node parent = node.parent(); if (parent == null) { parent = node; } SyntaxKind parentKind = parent.kind(); if (parentKind == SyntaxKind.MODULE_VAR_DECL) { if (parent.parent() != null && parent.parent().kind() == SyntaxKind.MODULE_PART && syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { return null; } return parent; } else if (parentKind == SyntaxKind.FUNCTION_DEFINITION || parentKind == SyntaxKind.IF_ELSE_STATEMENT || parentKind == SyntaxKind.ELSE_BLOCK || parentKind == SyntaxKind.SPECIFIC_FIELD || parentKind == SyntaxKind.WHILE_STATEMENT) { return parent; } else if (syntaxKind == SyntaxKind.SIMPLE_NAME_REFERENCE) { if (parentKind == SyntaxKind.REQUIRED_PARAM || parentKind == SyntaxKind.POSITIONAL_ARG || parentKind == SyntaxKind.BINARY_EXPRESSION || parentKind == 
SyntaxKind.RETURN_STATEMENT || parentKind == SyntaxKind.LOCAL_VAR_DECL || (parentKind == SyntaxKind.FUNCTION_CALL && parent.parent() != null && parent.parent().kind() == SyntaxKind.ASSIGNMENT_STATEMENT)) { return null; } return getParent(parent, syntaxKind); } else if (parentKind == SyntaxKind.SERVICE_DECLARATION || parentKind == SyntaxKind.BINARY_EXPRESSION) { if (syntaxKind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { return null; } return parent; } else if (parentKind == SyntaxKind.REQUIRED_PARAM || parentKind == SyntaxKind.RETURN_TYPE_DESCRIPTOR) { return null; } else if (parent.parent() != null) { return getParent(parent, syntaxKind); } else { return null; } } /** * Get the node position. * * @param node node * @return node position */ private DiagnosticPos getPosition(Node node) { if (node == null) { return null; } LineRange range = node.lineRange(); LinePosition startPos = range.startLine(); LinePosition endPos = range.endLine(); return new DiagnosticPos(null, startPos.line() + 1, endPos.line() + 1, startPos.offset(), endPos.offset()); } /** * return the indented start column. * * @param node node * @param syntaxKind node kind * @param addSpaces add spaces or not * @return start position */ private int getStartColumn(Node node, SyntaxKind syntaxKind, boolean addSpaces) { Node parent = getParent(node, syntaxKind); if (parent != null) { return getPosition(parent).sCol + (addSpaces ? 
4 : 0); } return 0; } private boolean isInLineRange(Node node) { if (this.lineRange == null) { return true; } int nodeStartLine = node.lineRange().startLine().line(); int nodeStartOffset = node.lineRange().startLine().offset(); int nodeEndLine = node.lineRange().endLine().line(); int nodeEndOffset = node.lineRange().endLine().offset(); int startLine = this.lineRange.startLine().line(); int startOffset = this.lineRange.startLine().offset(); int endLine = this.lineRange.endLine().line(); int endOffset = this.lineRange.endLine().offset(); if (nodeStartLine >= startLine && nodeEndLine <= endLine) { if (nodeStartLine == startLine || nodeEndLine == endLine) { return nodeStartOffset >= startOffset && nodeEndOffset <= endOffset; } return true; } return false; } public FormattingOptions getFormattingOptions() { return formattingOptions; } void setFormattingOptions(FormattingOptions formattingOptions) { this.formattingOptions = formattingOptions; } void setLineRange(LineRange lineRange) { this.lineRange = lineRange; } }
Good catch — I hadn't previously considered state restoration. I think the type serializer does need to be checkpointed and restored (code [link](https://github.com/apache/flink/blob/master/flink-runtime/src/main/java/org/apache/flink/runtime/state/StateSerializerProvider.java#L99)). I'm less sure about how type-info serialization is used, as I couldn't find a code reference for its usage.
/**
 * Custom Java-serialization hook for this type info.
 *
 * <p>The {@code schema} field is declared {@code transient} (see the field declaration),
 * presumably because Avro's {@code Schema} is not {@code Serializable} — TODO confirm.
 * We therefore persist it manually as a length-prefixed, UTF-8 encoded JSON string,
 * which the matching {@code readObject} hook parses back with {@code Schema.Parser}.
 *
 * @param oos the stream this instance is being written to
 * @throws IOException if writing to the stream fails
 */
private void writeObject(ObjectOutputStream oos) throws IOException {
    // toString(false) produces the compact (non-pretty-printed) JSON form of the schema.
    byte[] schemaStrInBytes = schema.toString(false).getBytes(StandardCharsets.UTF_8);
    // Length prefix first so the reader knows exactly how many bytes to consume.
    oos.writeInt(schemaStrInBytes.length);
    oos.write(schemaStrInBytes);
}
byte[] schemaStrInBytes = schema.toString(false).getBytes(StandardCharsets.UTF_8);
/**
 * Custom Java-serialization hook: persists the transient {@code schema} as a
 * length-prefixed UTF-8 byte sequence of its compact JSON form, mirroring the
 * format that the companion {@code readObject} hook consumes.
 *
 * @param oos target serialization stream
 * @throws IOException if the stream write fails
 */
private void writeObject(ObjectOutputStream oos) throws IOException {
    final String schemaJson = schema.toString(false);
    final byte[] payload = schemaJson.getBytes(StandardCharsets.UTF_8);
    oos.writeInt(payload.length);
    oos.write(payload);
}
/**
 * {@code TypeInformation} implementation for Avro {@code GenericRecord}s that carries
 * the Avro {@code Schema} required to create an {@code AvroSerializer}.
 *
 * <p>Serialization note: {@code schema} is transient and is restored by the custom
 * {@code readObject} hook below, which parses the schema back from its JSON form.
 */
class GenericRecordAvroTypeInfo extends TypeInformation<GenericRecord> {

    private static final long serialVersionUID = 4141977586453820650L;

    // Transient: rehydrated from a serialized JSON string in readObject below.
    private transient Schema schema;

    public GenericRecordAvroTypeInfo(Schema schema) {
        this.schema = checkNotNull(schema);
    }

    @Override
    public boolean isBasicType() {
        return false;
    }

    @Override
    public boolean isTupleType() {
        return false;
    }

    // A GenericRecord is treated as a single atomic field (arity 1, one total field).
    @Override
    public int getArity() {
        return 1;
    }

    @Override
    public int getTotalFields() {
        return 1;
    }

    @Override
    public Class<GenericRecord> getTypeClass() {
        return GenericRecord.class;
    }

    @Override
    public boolean isKeyType() {
        return false;
    }

    // The serializer is rebuilt from the (restored) schema; the ExecutionConfig is unused here.
    @Override
    public TypeSerializer<GenericRecord> createSerializer(ExecutionConfig config) {
        return new AvroSerializer<>(GenericRecord.class, schema);
    }

    @Override
    public String toString() {
        return String.format("GenericRecord(\"%s\")", schema.toString());
    }

    // Equality is based solely on the Avro schema; canEqual guards the type check.
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof GenericRecordAvroTypeInfo) {
            GenericRecordAvroTypeInfo avroTypeInfo = (GenericRecordAvroTypeInfo) obj;
            return Objects.equals(avroTypeInfo.schema, schema);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(schema);
    }

    @Override
    public boolean canEqual(Object obj) {
        return obj instanceof GenericRecordAvroTypeInfo;
    }

    /**
     * Custom deserialization hook: reads the length-prefixed UTF-8 JSON schema
     * written by the matching {@code writeObject} hook and parses it back.
     *
     * @param ois the stream this instance is being read from
     * @throws ClassNotFoundException required by the serialization contract
     * @throws IOException if reading from the stream fails
     */
    private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
        int len = ois.readInt();
        byte[] content = new byte[len];
        ois.readFully(content);
        this.schema = new Schema.Parser().parse(new String(content, StandardCharsets.UTF_8));
    }
}
class GenericRecordAvroTypeInfo extends TypeInformation<GenericRecord> { private static final long serialVersionUID = 4141977586453820650L; private transient Schema schema; public GenericRecordAvroTypeInfo(Schema schema) { this.schema = checkNotNull(schema); } @Override public boolean isBasicType() { return false; } @Override public boolean isTupleType() { return false; } @Override public int getArity() { return 1; } @Override public int getTotalFields() { return 1; } @Override public Class<GenericRecord> getTypeClass() { return GenericRecord.class; } @Override public boolean isKeyType() { return false; } @Override public TypeSerializer<GenericRecord> createSerializer(ExecutionConfig config) { return new AvroSerializer<>(GenericRecord.class, schema); } @Override public String toString() { return String.format("GenericRecord(\"%s\")", schema.toString()); } @Override public boolean equals(Object obj) { if (obj instanceof GenericRecordAvroTypeInfo) { GenericRecordAvroTypeInfo avroTypeInfo = (GenericRecordAvroTypeInfo) obj; return Objects.equals(avroTypeInfo.schema, schema); } else { return false; } } @Override public int hashCode() { return Objects.hashCode(schema); } @Override public boolean canEqual(Object obj) { return obj instanceof GenericRecordAvroTypeInfo; } private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException { int len = ois.readInt(); byte[] content = new byte[len]; ois.readFully(content); this.schema = new Schema.Parser().parse(new String(content, StandardCharsets.UTF_8)); } }
Can `cpuPeriod()` and `cpuQuota()` be updated to return `long`, so that the casts here become unnecessary?
/**
 * Builds the docker-java {@code CreateContainerCmd} from the accumulated builder state:
 * volume binds, ulimits, capabilities, security options, resources, network settings,
 * labels and environment.
 *
 * Notes:
 * - --cpu-period and --cpu-quota are both gated on {@code cpuQuota() > 0}, so the pair
 *   is applied (or omitted) together.
 * - Memory swap is set equal to the memory limit; presumably this disables extra swap
 *   for the container — confirm against docker-java's HostConfig semantics.
 * - A deterministic pseudo-random MAC address is generated only for non-"host" network
 *   modes, seeded from hostname and IP addresses (see generateMACAddress).
 */
private CreateContainerCmd createCreateContainerCmd() { List<Bind> volumeBinds = volumeBindSpecs.stream().map(Bind::parse).collect(Collectors.toList()); final HostConfig hostConfig = new HostConfig() .withSecurityOpts(new ArrayList<>(securityOpts)) .withBinds(volumeBinds) .withUlimits(ulimits) .withCapAdd(addCapabilities.toArray(new Capability[0])) .withCapDrop(dropCapabilities.toArray(new Capability[0])) .withPrivileged(privileged); containerResources.ifPresent(cr -> hostConfig .withCpuShares(cr.cpuShares()) .withMemory(cr.memoryBytes()) .withMemorySwap(cr.memoryBytes()) .withCpuPeriod(cr.cpuQuota() > 0 ? (long) cr.cpuPeriod() : null) .withCpuQuota(cr.cpuQuota() > 0 ? (long) cr.cpuQuota() : null)); final CreateContainerCmd containerCmd = docker .createContainerCmd(dockerImage.asString()) .withHostConfig(hostConfig) .withName(containerName.asString()) .withLabels(labels) .withEnv(environmentAssignments); networkMode .filter(mode -> ! mode.toLowerCase().equals("host")) .ifPresent(mode -> containerCmd.withMacAddress(generateMACAddress(hostName, ipv4Address, ipv6Address))); hostName.ifPresent(containerCmd::withHostName); networkMode.ifPresent(hostConfig::withNetworkMode); ipv4Address.ifPresent(containerCmd::withIpv4Address); ipv6Address.ifPresent(containerCmd::withIpv6Address); entrypoint.ifPresent(containerCmd::withEntrypoint); return containerCmd; }
.withCpuPeriod(cr.cpuQuota() > 0 ? (long) cr.cpuPeriod() : null)
/**
 * Builds the docker-java {@code CreateContainerCmd} from the accumulated builder state:
 * volume binds, ulimits, capabilities, security options, resources, network settings,
 * labels and environment.
 *
 * Notes:
 * - --cpu-period and --cpu-quota are both gated on {@code cpuQuota() > 0}, so the pair
 *   is applied (or omitted) together.
 * - Memory swap is set equal to the memory limit; presumably this disables extra swap
 *   for the container — confirm against docker-java's HostConfig semantics.
 * - A deterministic pseudo-random MAC address is generated only for non-"host" network
 *   modes, seeded from hostname and IP addresses (see generateMACAddress).
 */
private CreateContainerCmd createCreateContainerCmd() { List<Bind> volumeBinds = volumeBindSpecs.stream().map(Bind::parse).collect(Collectors.toList()); final HostConfig hostConfig = new HostConfig() .withSecurityOpts(new ArrayList<>(securityOpts)) .withBinds(volumeBinds) .withUlimits(ulimits) .withCapAdd(addCapabilities.toArray(new Capability[0])) .withCapDrop(dropCapabilities.toArray(new Capability[0])) .withPrivileged(privileged); containerResources.ifPresent(cr -> hostConfig .withCpuShares(cr.cpuShares()) .withMemory(cr.memoryBytes()) .withMemorySwap(cr.memoryBytes()) .withCpuPeriod(cr.cpuQuota() > 0 ? (long) cr.cpuPeriod() : null) .withCpuQuota(cr.cpuQuota() > 0 ? (long) cr.cpuQuota() : null)); final CreateContainerCmd containerCmd = docker .createContainerCmd(dockerImage.asString()) .withHostConfig(hostConfig) .withName(containerName.asString()) .withLabels(labels) .withEnv(environmentAssignments); networkMode .filter(mode -> ! mode.toLowerCase().equals("host")) .ifPresent(mode -> containerCmd.withMacAddress(generateMACAddress(hostName, ipv4Address, ipv6Address))); hostName.ifPresent(containerCmd::withHostName); networkMode.ifPresent(hostConfig::withNetworkMode); ipv4Address.ifPresent(containerCmd::withIpv4Address); ipv6Address.ifPresent(containerCmd::withIpv6Address); entrypoint.ifPresent(containerCmd::withEntrypoint); return containerCmd; }
/**
 * Fluent builder implementing {@code Docker.CreateContainerCommand}: each {@code with*}
 * method accumulates state (labels, env assignments, volume binds, ulimits, capability
 * sets, network/IP settings, entrypoint, privileged flag) and {@code create()} executes
 * the resulting docker-java command, wrapping failures in {@code DockerException}.
 * {@code toString()} renders the equivalent 'docker run' command line for diagnostics.
 * The MAC address is derived deterministically from hostname + IPv4 + IPv6 via a seeded
 * SHA1PRNG, with the locally-administered/unicast bits forced in the first byte.
 */
class CreateContainerCommandImpl implements Docker.CreateContainerCommand { private final DockerClient docker; private final DockerImage dockerImage; private final ContainerName containerName; private final Map<String, String> labels = new HashMap<>(); private final List<String> environmentAssignments = new ArrayList<>(); private final List<String> volumeBindSpecs = new ArrayList<>(); private final List<Ulimit> ulimits = new ArrayList<>(); private final Set<Capability> addCapabilities = new HashSet<>(); private final Set<Capability> dropCapabilities = new HashSet<>(); private final Set<String> securityOpts = new HashSet<>(); private Optional<String> hostName = Optional.empty(); private Optional<ContainerResources> containerResources = Optional.empty(); private Optional<String> networkMode = Optional.empty(); private Optional<String> ipv4Address = Optional.empty(); private Optional<String> ipv6Address = Optional.empty(); private Optional<String[]> entrypoint = Optional.empty(); private boolean privileged = false; CreateContainerCommandImpl(DockerClient docker, DockerImage dockerImage, ContainerName containerName) { this.docker = docker; this.dockerImage = dockerImage; this.containerName = containerName; } @Override public Docker.CreateContainerCommand withHostName(String hostName) { this.hostName = Optional.of(hostName); return this; } @Override public Docker.CreateContainerCommand withResources(ContainerResources containerResources) { this.containerResources = Optional.of(containerResources); return this; } @Override public Docker.CreateContainerCommand withLabel(String name, String value) { assert !name.contains("="); labels.put(name, value); return this; } public Docker.CreateContainerCommand withManagedBy(String manager) { return withLabel(LABEL_NAME_MANAGEDBY, manager); } @Override public Docker.CreateContainerCommand withAddCapability(String capabilityName) { addCapabilities.add(Capability.valueOf(capabilityName)); return this; } @Override public 
Docker.CreateContainerCommand withDropCapability(String capabilityName) { dropCapabilities.add(Capability.valueOf(capabilityName)); return this; } @Override public Docker.CreateContainerCommand withSecurityOpts(String securityOpt) { securityOpts.add(securityOpt); return this; } @Override public Docker.CreateContainerCommand withPrivileged(boolean privileged) { this.privileged = privileged; return this; } @Override public Docker.CreateContainerCommand withUlimit(String name, int softLimit, int hardLimit) { ulimits.add(new Ulimit(name, softLimit, hardLimit)); return this; } @Override public Docker.CreateContainerCommand withEntrypoint(String... entrypoint) { if (entrypoint.length < 1) throw new IllegalArgumentException("Entrypoint must contain at least 1 element"); this.entrypoint = Optional.of(entrypoint); return this; } @Override public Docker.CreateContainerCommand withEnvironment(String name, String value) { assert name.indexOf('=') == -1; environmentAssignments.add(name + "=" + value); return this; } @Override public Docker.CreateContainerCommand withVolume(Path path, Path volumePath) { volumeBindSpecs.add(path + ":" + volumePath + ":Z"); return this; } @Override public Docker.CreateContainerCommand withSharedVolume(Path path, Path volumePath) { volumeBindSpecs.add(path + ":" + volumePath + ":z"); return this; } @Override public Docker.CreateContainerCommand withNetworkMode(String mode) { networkMode = Optional.of(mode); return this; } @Override public Docker.CreateContainerCommand withIpAddress(InetAddress address) { if (address instanceof Inet6Address) { ipv6Address = Optional.of(address.getHostAddress()); } else { ipv4Address = Optional.of(address.getHostAddress()); } return this; } @Override public void create() { try { createCreateContainerCmd().exec(); } catch (RuntimeException e) { throw new DockerException("Failed to create container " + toString(), e); } } /** Maps ("--env", {"A", "B", "C"}) to "--env A --env B --env C" */ private static String 
toRepeatedOption(String option, Collection<String> optionValues) { return optionValues.stream() .map(optionValue -> option + " " + optionValue) .collect(Collectors.joining(" ")); } private static String toOptionalOption(String option, Optional<?> value) { return value.map(o -> option + " " + o).orElse(""); } private static String toFlagOption(String option, boolean value) { return value ? option : ""; } /** Make toString() print the equivalent arguments to 'docker run' */ @Override public String toString() { List<String> labelList = labels.entrySet().stream() .map(entry -> entry.getKey() + "=" + entry.getValue()).collect(Collectors.toList()); List<String> ulimitList = ulimits.stream() .map(ulimit -> ulimit.getName() + "=" + ulimit.getSoft() + ":" + ulimit.getHard()) .collect(Collectors.toList()); List<String> addCapabilitiesList = addCapabilities.stream().map(Enum<Capability>::toString).sorted().collect(Collectors.toList()); List<String> dropCapabilitiesList = dropCapabilities.stream().map(Enum<Capability>::toString).sorted().collect(Collectors.toList()); Optional<String> entrypointExecuteable = entrypoint.map(args -> args[0]); String entrypointArgs = entrypoint.map(Stream::of).orElseGet(Stream::empty) .skip(1) .collect(Collectors.joining(" ")); return Stream.of( "--name " + containerName.asString(), toOptionalOption("--hostname", hostName), toOptionalOption("--cpu-shares", containerResources.map(ContainerResources::cpuShares)), toOptionalOption("--cpus", containerResources.map(ContainerResources::cpus)), toOptionalOption("--memory", containerResources.map(ContainerResources::memoryBytes)), toRepeatedOption("--label", labelList), toRepeatedOption("--ulimit", ulimitList), toRepeatedOption("--env", environmentAssignments), toRepeatedOption("--volume", volumeBindSpecs), toRepeatedOption("--cap-add", addCapabilitiesList), toRepeatedOption("--cap-drop", dropCapabilitiesList), toRepeatedOption("--security-opt", securityOpts), toOptionalOption("--net", networkMode), 
toOptionalOption("--ip", ipv4Address), toOptionalOption("--ip6", ipv6Address), toOptionalOption("--entrypoint", entrypointExecuteable), toFlagOption("--privileged", privileged), dockerImage.asString(), entrypointArgs) .filter(s -> !s.isEmpty()) .collect(Collectors.joining(" ")); } /** * Generates a pseudo-random MAC address based on the hostname, IPv4- and IPv6-address. */ static String generateMACAddress(Optional<String> hostname, Optional<String> ipv4Address, Optional<String> ipv6Address) { final String seed = hostname.orElse("") + ipv4Address.orElse("") + ipv6Address.orElse(""); Random rand = getPRNG(seed); byte[] macAddr = new byte[6]; rand.nextBytes(macAddr); macAddr[0] = (byte) ((macAddr[0] | 2) & 254); return IntStream.range(0, macAddr.length) .mapToObj(i -> String.format("%02x", macAddr[i])) .collect(Collectors.joining(":")); } private static Random getPRNG(String seed) { try { SecureRandom rand = SecureRandom.getInstance("SHA1PRNG"); rand.setSeed(seed.getBytes()); return rand; } catch (NoSuchAlgorithmException e) { throw new RuntimeException("Failed to get pseudo-random number generator", e); } } }
/**
 * Fluent builder implementing {@code Docker.CreateContainerCommand}: each {@code with*}
 * method accumulates state (labels, env assignments, volume binds, ulimits, capability
 * sets, network/IP settings, entrypoint, privileged flag) and {@code create()} executes
 * the resulting docker-java command, wrapping failures in {@code DockerException}.
 * {@code toString()} renders the equivalent 'docker run' command line for diagnostics.
 * The MAC address is derived deterministically from hostname + IPv4 + IPv6 via a seeded
 * SHA1PRNG, with the locally-administered/unicast bits forced in the first byte.
 */
class CreateContainerCommandImpl implements Docker.CreateContainerCommand { private final DockerClient docker; private final DockerImage dockerImage; private final ContainerName containerName; private final Map<String, String> labels = new HashMap<>(); private final List<String> environmentAssignments = new ArrayList<>(); private final List<String> volumeBindSpecs = new ArrayList<>(); private final List<Ulimit> ulimits = new ArrayList<>(); private final Set<Capability> addCapabilities = new HashSet<>(); private final Set<Capability> dropCapabilities = new HashSet<>(); private final Set<String> securityOpts = new HashSet<>(); private Optional<String> hostName = Optional.empty(); private Optional<ContainerResources> containerResources = Optional.empty(); private Optional<String> networkMode = Optional.empty(); private Optional<String> ipv4Address = Optional.empty(); private Optional<String> ipv6Address = Optional.empty(); private Optional<String[]> entrypoint = Optional.empty(); private boolean privileged = false; CreateContainerCommandImpl(DockerClient docker, DockerImage dockerImage, ContainerName containerName) { this.docker = docker; this.dockerImage = dockerImage; this.containerName = containerName; } @Override public Docker.CreateContainerCommand withHostName(String hostName) { this.hostName = Optional.of(hostName); return this; } @Override public Docker.CreateContainerCommand withResources(ContainerResources containerResources) { this.containerResources = Optional.of(containerResources); return this; } @Override public Docker.CreateContainerCommand withLabel(String name, String value) { assert !name.contains("="); labels.put(name, value); return this; } public Docker.CreateContainerCommand withManagedBy(String manager) { return withLabel(LABEL_NAME_MANAGEDBY, manager); } @Override public Docker.CreateContainerCommand withAddCapability(String capabilityName) { addCapabilities.add(Capability.valueOf(capabilityName)); return this; } @Override public 
Docker.CreateContainerCommand withDropCapability(String capabilityName) { dropCapabilities.add(Capability.valueOf(capabilityName)); return this; } @Override public Docker.CreateContainerCommand withSecurityOpts(String securityOpt) { securityOpts.add(securityOpt); return this; } @Override public Docker.CreateContainerCommand withPrivileged(boolean privileged) { this.privileged = privileged; return this; } @Override public Docker.CreateContainerCommand withUlimit(String name, int softLimit, int hardLimit) { ulimits.add(new Ulimit(name, softLimit, hardLimit)); return this; } @Override public Docker.CreateContainerCommand withEntrypoint(String... entrypoint) { if (entrypoint.length < 1) throw new IllegalArgumentException("Entrypoint must contain at least 1 element"); this.entrypoint = Optional.of(entrypoint); return this; } @Override public Docker.CreateContainerCommand withEnvironment(String name, String value) { assert name.indexOf('=') == -1; environmentAssignments.add(name + "=" + value); return this; } @Override public Docker.CreateContainerCommand withVolume(Path path, Path volumePath) { volumeBindSpecs.add(path + ":" + volumePath + ":Z"); return this; } @Override public Docker.CreateContainerCommand withSharedVolume(Path path, Path volumePath) { volumeBindSpecs.add(path + ":" + volumePath + ":z"); return this; } @Override public Docker.CreateContainerCommand withNetworkMode(String mode) { networkMode = Optional.of(mode); return this; } @Override public Docker.CreateContainerCommand withIpAddress(InetAddress address) { if (address instanceof Inet6Address) { ipv6Address = Optional.of(address.getHostAddress()); } else { ipv4Address = Optional.of(address.getHostAddress()); } return this; } @Override public void create() { try { createCreateContainerCmd().exec(); } catch (RuntimeException e) { throw new DockerException("Failed to create container " + toString(), e); } } /** Maps ("--env", {"A", "B", "C"}) to "--env A --env B --env C" */ private static String 
toRepeatedOption(String option, Collection<String> optionValues) { return optionValues.stream() .map(optionValue -> option + " " + optionValue) .collect(Collectors.joining(" ")); } private static String toOptionalOption(String option, Optional<?> value) { return value.map(o -> option + " " + o).orElse(""); } private static String toFlagOption(String option, boolean value) { return value ? option : ""; } /** Make toString() print the equivalent arguments to 'docker run' */ @Override public String toString() { List<String> labelList = labels.entrySet().stream() .map(entry -> entry.getKey() + "=" + entry.getValue()).collect(Collectors.toList()); List<String> ulimitList = ulimits.stream() .map(ulimit -> ulimit.getName() + "=" + ulimit.getSoft() + ":" + ulimit.getHard()) .collect(Collectors.toList()); List<String> addCapabilitiesList = addCapabilities.stream().map(Enum<Capability>::toString).sorted().collect(Collectors.toList()); List<String> dropCapabilitiesList = dropCapabilities.stream().map(Enum<Capability>::toString).sorted().collect(Collectors.toList()); Optional<String> entrypointExecuteable = entrypoint.map(args -> args[0]); String entrypointArgs = entrypoint.map(Stream::of).orElseGet(Stream::empty) .skip(1) .collect(Collectors.joining(" ")); return Stream.of( "--name " + containerName.asString(), toOptionalOption("--hostname", hostName), toOptionalOption("--cpu-shares", containerResources.map(ContainerResources::cpuShares)), toOptionalOption("--cpus", containerResources.map(ContainerResources::cpus)), toOptionalOption("--memory", containerResources.map(ContainerResources::memoryBytes)), toRepeatedOption("--label", labelList), toRepeatedOption("--ulimit", ulimitList), toRepeatedOption("--env", environmentAssignments), toRepeatedOption("--volume", volumeBindSpecs), toRepeatedOption("--cap-add", addCapabilitiesList), toRepeatedOption("--cap-drop", dropCapabilitiesList), toRepeatedOption("--security-opt", securityOpts), toOptionalOption("--net", networkMode), 
toOptionalOption("--ip", ipv4Address), toOptionalOption("--ip6", ipv6Address), toOptionalOption("--entrypoint", entrypointExecuteable), toFlagOption("--privileged", privileged), dockerImage.asString(), entrypointArgs) .filter(s -> !s.isEmpty()) .collect(Collectors.joining(" ")); } /** * Generates a pseudo-random MAC address based on the hostname, IPv4- and IPv6-address. */ static String generateMACAddress(Optional<String> hostname, Optional<String> ipv4Address, Optional<String> ipv6Address) { final String seed = hostname.orElse("") + ipv4Address.orElse("") + ipv6Address.orElse(""); Random rand = getPRNG(seed); byte[] macAddr = new byte[6]; rand.nextBytes(macAddr); macAddr[0] = (byte) ((macAddr[0] | 2) & 254); return IntStream.range(0, macAddr.length) .mapToObj(i -> String.format("%02x", macAddr[i])) .collect(Collectors.joining(":")); } private static Random getPRNG(String seed) { try { SecureRandom rand = SecureRandom.getInstance("SHA1PRNG"); rand.setSeed(seed.getBytes()); return rand; } catch (NoSuchAlgorithmException e) { throw new RuntimeException("Failed to get pseudo-random number generator", e); } } }
@Sanne the Kubernetes use case is only valid for demos IMO, and even then providing a cache CR is a more realistic production use case than creating it from the properties — though for simplicity it is clearly something easy to do. I will remove the file option and leave the URI and pasted configuration as they are.
/**
 * Translates the Quarkus runtime configuration into an Infinispan Hot Rod
 * {@code ConfigurationBuilder}, layering the runtime config values on top of the
 * given base {@code properties} and then applying per-cache configuration
 * (URI, classpath file, or inline XML/JSON) plus near-cache settings.
 *
 * Fix over the previous version: {@code ClassLoader.getResource} returns {@code null}
 * when the configured URI is not on the classpath, which previously surfaced as an
 * opaque NullPointerException at {@code resource.toURI()}. We now fail fast with a
 * descriptive message naming the cache and the missing resource.
 */
private ConfigurationBuilder builderFromProperties(Properties properties) {
    ConfigurationBuilder builder = new ConfigurationBuilder();
    // The marshaller is stored as a live instance (not a class name), so remove it
    // from the properties before they are handed to the builder wholesale.
    Object marshallerInstance = properties.remove(ConfigurationProperties.MARSHALLER);
    if (marshallerInstance != null) {
        if (marshallerInstance instanceof ProtoStreamMarshaller) {
            handleProtoStreamMarshaller((ProtoStreamMarshaller) marshallerInstance, properties, beanManager);
        }
        builder.marshaller((Marshaller) marshallerInstance);
    }
    InfinispanClientRuntimeConfig infinispanClientRuntimeConfig = this.infinispanClientRuntimeConfig.get();
    // Runtime config wins over the base properties for every key it supplies.
    if (infinispanClientRuntimeConfig.serverList.isPresent()) {
        properties.put(ConfigurationProperties.SERVER_LIST, infinispanClientRuntimeConfig.serverList.get());
    }
    if (infinispanClientRuntimeConfig.clientIntelligence.isPresent()) {
        properties.put(ConfigurationProperties.CLIENT_INTELLIGENCE, infinispanClientRuntimeConfig.clientIntelligence.get());
    }
    if (infinispanClientRuntimeConfig.useAuth.isPresent()) {
        properties.put(ConfigurationProperties.USE_AUTH, infinispanClientRuntimeConfig.useAuth.get());
    }
    if (infinispanClientRuntimeConfig.authUsername.isPresent()) {
        properties.put(ConfigurationProperties.AUTH_USERNAME, infinispanClientRuntimeConfig.authUsername.get());
    }
    if (infinispanClientRuntimeConfig.authPassword.isPresent()) {
        properties.put(ConfigurationProperties.AUTH_PASSWORD, infinispanClientRuntimeConfig.authPassword.get());
    }
    if (infinispanClientRuntimeConfig.authRealm.isPresent()) {
        properties.put(ConfigurationProperties.AUTH_REALM, infinispanClientRuntimeConfig.authRealm.get());
    }
    if (infinispanClientRuntimeConfig.authServerName.isPresent()) {
        properties.put(ConfigurationProperties.AUTH_SERVER_NAME, infinispanClientRuntimeConfig.authServerName.get());
    }
    if (infinispanClientRuntimeConfig.authClientSubject.isPresent()) {
        properties.put(ConfigurationProperties.AUTH_CLIENT_SUBJECT, infinispanClientRuntimeConfig.authClientSubject.get());
    }
    if (infinispanClientRuntimeConfig.authCallbackHandler.isPresent()) {
        properties.put(ConfigurationProperties.AUTH_CALLBACK_HANDLER, infinispanClientRuntimeConfig.authCallbackHandler.get());
    }
    if (infinispanClientRuntimeConfig.saslMechanism.isPresent()) {
        properties.put(ConfigurationProperties.SASL_MECHANISM, infinispanClientRuntimeConfig.saslMechanism.get());
    }
    if (infinispanClientRuntimeConfig.trustStore.isPresent()) {
        properties.put(ConfigurationProperties.TRUST_STORE_FILE_NAME, infinispanClientRuntimeConfig.trustStore.get());
    }
    if (infinispanClientRuntimeConfig.trustStorePassword.isPresent()) {
        properties.put(ConfigurationProperties.TRUST_STORE_PASSWORD, infinispanClientRuntimeConfig.trustStorePassword.get());
    }
    if (infinispanClientRuntimeConfig.trustStoreType.isPresent()) {
        properties.put(ConfigurationProperties.TRUST_STORE_TYPE, infinispanClientRuntimeConfig.trustStoreType.get());
    }
    builder.withProperties(properties);
    // Per-cache configuration: exactly one of URI / file / inline config is applied,
    // in that order of precedence; near-cache settings apply independently.
    for (Map.Entry<String, InfinispanClientRuntimeConfig.RemoteCacheConfig> cache : infinispanClientRuntimeConfig.cache
            .entrySet()) {
        String cacheName = cache.getKey();
        InfinispanClientRuntimeConfig.RemoteCacheConfig remoteCacheConfig = cache.getValue();
        if (remoteCacheConfig.configurationUri.isPresent()) {
            String configurationUri = remoteCacheConfig.configurationUri.get();
            URL resource = InfinispanClientProducer.class.getClassLoader().getResource(configurationUri);
            if (resource == null) {
                // getResource returns null for a missing resource; without this check the
                // subsequent toUri() call would throw an opaque NullPointerException.
                throw new IllegalStateException("Configuration URI '" + configurationUri
                        + "' for cache '" + cacheName + "' was not found on the classpath");
            }
            try {
                builder.remoteCache(cacheName).configurationURI(resource.toURI());
            } catch (URISyntaxException e) {
                throw new RuntimeException(e);
            }
        } else if (remoteCacheConfig.configurationFile.isPresent()) {
            try {
                String content = getContents(InfinispanClientProducer.class.getClassLoader()
                        .getResourceAsStream(remoteCacheConfig.configurationFile.get()));
                builder.remoteCache(cacheName).configuration(content);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else if (remoteCacheConfig.configuration.isPresent()) {
            builder.remoteCache(cacheName).configuration(remoteCacheConfig.configuration.get());
        }
        if (remoteCacheConfig.nearCacheMaxEntries.isPresent()) {
            builder.remoteCache(cacheName).nearCacheMaxEntries(remoteCacheConfig.nearCacheMaxEntries.get());
        }
        if (remoteCacheConfig.nearCacheMode.isPresent()) {
            builder.remoteCache(cacheName).nearCacheMode(remoteCacheConfig.nearCacheMode.get());
        }
        if (remoteCacheConfig.nearCacheUseBloomFilter.isPresent()) {
            builder.remoteCache(cacheName).nearCacheUseBloomFilter(remoteCacheConfig.nearCacheUseBloomFilter.get());
        }
    }
    return builder;
}
InfinispanClientRuntimeConfig.RemoteCacheConfig remoteCacheConfig = cache.getValue();
/**
 * Translates the Quarkus runtime configuration into an Infinispan Hot Rod
 * {@code ConfigurationBuilder}: removes the marshaller instance from the base
 * {@code properties}, overlays every present runtime config value, then applies
 * per-cache configuration (classpath URI or inline config) plus near-cache settings.
 *
 * NOTE(review): {@code getResource(...)} returns null for a missing classpath
 * resource; the broad {@code catch (Exception e)} below would then wrap the resulting
 * NullPointerException — consider an explicit null check with a descriptive message.
 */
private ConfigurationBuilder builderFromProperties(Properties properties) { ConfigurationBuilder builder = new ConfigurationBuilder(); Object marshallerInstance = properties.remove(ConfigurationProperties.MARSHALLER); if (marshallerInstance != null) { if (marshallerInstance instanceof ProtoStreamMarshaller) { handleProtoStreamMarshaller((ProtoStreamMarshaller) marshallerInstance, properties, beanManager); } builder.marshaller((Marshaller) marshallerInstance); } InfinispanClientRuntimeConfig infinispanClientRuntimeConfig = this.infinispanClientRuntimeConfig.get(); if (infinispanClientRuntimeConfig.serverList.isPresent()) { properties.put(ConfigurationProperties.SERVER_LIST, infinispanClientRuntimeConfig.serverList.get()); } if (infinispanClientRuntimeConfig.clientIntelligence.isPresent()) { properties.put(ConfigurationProperties.CLIENT_INTELLIGENCE, infinispanClientRuntimeConfig.clientIntelligence.get()); } if (infinispanClientRuntimeConfig.useAuth.isPresent()) { properties.put(ConfigurationProperties.USE_AUTH, infinispanClientRuntimeConfig.useAuth.get()); } if (infinispanClientRuntimeConfig.authUsername.isPresent()) { properties.put(ConfigurationProperties.AUTH_USERNAME, infinispanClientRuntimeConfig.authUsername.get()); } if (infinispanClientRuntimeConfig.authPassword.isPresent()) { properties.put(ConfigurationProperties.AUTH_PASSWORD, infinispanClientRuntimeConfig.authPassword.get()); } if (infinispanClientRuntimeConfig.authRealm.isPresent()) { properties.put(ConfigurationProperties.AUTH_REALM, infinispanClientRuntimeConfig.authRealm.get()); } if (infinispanClientRuntimeConfig.authServerName.isPresent()) { properties.put(ConfigurationProperties.AUTH_SERVER_NAME, infinispanClientRuntimeConfig.authServerName.get()); } if (infinispanClientRuntimeConfig.authClientSubject.isPresent()) { properties.put(ConfigurationProperties.AUTH_CLIENT_SUBJECT, infinispanClientRuntimeConfig.authClientSubject.get()); } if (infinispanClientRuntimeConfig.authCallbackHandler.isPresent()) 
{ properties.put(ConfigurationProperties.AUTH_CALLBACK_HANDLER, infinispanClientRuntimeConfig.authCallbackHandler.get()); } if (infinispanClientRuntimeConfig.saslMechanism.isPresent()) { properties.put(ConfigurationProperties.SASL_MECHANISM, infinispanClientRuntimeConfig.saslMechanism.get()); } if (infinispanClientRuntimeConfig.trustStore.isPresent()) { properties.put(ConfigurationProperties.TRUST_STORE_FILE_NAME, infinispanClientRuntimeConfig.trustStore.get()); } if (infinispanClientRuntimeConfig.trustStorePassword.isPresent()) { properties.put(ConfigurationProperties.TRUST_STORE_PASSWORD, infinispanClientRuntimeConfig.trustStorePassword.get()); } if (infinispanClientRuntimeConfig.trustStoreType.isPresent()) { properties.put(ConfigurationProperties.TRUST_STORE_TYPE, infinispanClientRuntimeConfig.trustStoreType.get()); } builder.withProperties(properties); for (Map.Entry<String, InfinispanClientRuntimeConfig.RemoteCacheConfig> cache : infinispanClientRuntimeConfig.cache .entrySet()) { String cacheName = cache.getKey(); InfinispanClientRuntimeConfig.RemoteCacheConfig remoteCacheConfig = cache.getValue(); if (remoteCacheConfig.configurationUri.isPresent()) { URL configFile = InfinispanClientProducer.class.getClassLoader() .getResource(remoteCacheConfig.configurationUri.get()); try { builder.remoteCache(cacheName).configurationURI(configFile.toURI()); } catch (Exception e) { throw new RuntimeException(e); } } else if (remoteCacheConfig.configuration.isPresent()) { builder.remoteCache(cacheName).configuration(remoteCacheConfig.configuration.get()); } if (remoteCacheConfig.nearCacheMaxEntries.isPresent()) { builder.remoteCache(cacheName).nearCacheMaxEntries(remoteCacheConfig.nearCacheMaxEntries.get()); } if (remoteCacheConfig.nearCacheMode.isPresent()) { builder.remoteCache(cacheName).nearCacheMode(remoteCacheConfig.nearCacheMode.get()); } if (remoteCacheConfig.nearCacheUseBloomFilter.isPresent()) { 
builder.remoteCache(cacheName).nearCacheUseBloomFilter(remoteCacheConfig.nearCacheUseBloomFilter.get()); } } return builder; }
/**
 * Reads the entire contents of a resource on the class path.
 *
 * NOTE(review): getResourceAsStream returns null for a missing resource —
 * presumably the getContents(InputStream) overload handles or propagates that;
 * verify before relying on it.
 *
 * @param fileName class path to read contents of
 * @return string containing the contents of the file
 */
private static String getContents(String fileName) { InputStream stream = InfinispanClientProducer.class.getResourceAsStream(fileName); return getContents(stream); }
/**
 * Reads the entire contents of a resource on the class path.
 *
 * NOTE(review): getResourceAsStream returns null for a missing resource —
 * presumably the getContents(InputStream) overload handles or propagates that;
 * verify before relying on it.
 *
 * @param fileName class path to read contents of
 * @return string containing the contents of the file
 */
private static String getContents(String fileName) { InputStream stream = InfinispanClientProducer.class.getResourceAsStream(fileName); return getContents(stream); }
I created a JIRA for this: https://issues.apache.org/jira/browse/BEAM-10611. Please add a TODO here referencing it, so that we can make this improvement in the future.
/**
 * Converts a Java object carrying a Beam logical type (DATE, TIME, DATETIME) into the
 * corresponding ZetaSQL Value. Each logical type accepts either its base-type
 * representation (Long / Row) or its input-type representation (java.time object).
 *
 * @param object the value to convert
 * @param identifier the Beam logical type identifier
 * @return the equivalent ZetaSQL Value
 * @throws UnsupportedOperationException if the identifier is not a known logical type
 */
private static Value beamLogicalObjectToZetaSqlValue(Object object, String identifier) { if (SqlTypes.DATE.getIdentifier().equals(identifier)) { if (object instanceof Long) { /* base type: epoch days as Long */ return Value.createDateValue(((Long) object).intValue()); } else { return Value.createDateValue((int) ((LocalDate) object).toEpochDay()); } } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) { if (object instanceof Long) { /* base type: nano-of-day as Long */ return Value.createTimeValue( CivilTimeEncoder.encodePacked64TimeNanos(LocalTime.ofNanoOfDay((Long) object))); } else { return Value.createTimeValue(CivilTimeEncoder.encodePacked64TimeNanos((LocalTime) object)); } } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) { LocalDateTime datetime; if (object instanceof Row) { /* TODO(https://issues.apache.org/jira/browse/BEAM-10611): avoid hard-coded string field names here; use the logical type's declared field-name constants instead. */ datetime = LocalDateTime.of( LocalDate.ofEpochDay(((Row) object).getValue("Date")), LocalTime.ofNanoOfDay(((Row) object).getValue("Time"))); } else { datetime = (LocalDateTime) object; } return Value.createDatetimeValue( CivilTimeEncoder.encodePacked64DatetimeSeconds(datetime), datetime.getNano()); } else { throw new UnsupportedOperationException("Unknown Beam logical type: " + identifier); } }
return Value.createDatetimeValue(
/**
 * Converts a Java object carrying a Beam logical type (DATE, TIME, DATETIME) into the
 * corresponding ZetaSQL Value. Each logical type accepts either its base-type
 * representation (Long / Row) or its input-type representation (java.time object).
 *
 * @param object the value to convert
 * @param identifier the Beam logical type identifier
 * @return the equivalent ZetaSQL Value
 * @throws UnsupportedOperationException if the identifier is not a known logical type
 */
private static Value beamLogicalObjectToZetaSqlValue(Object object, String identifier) { if (SqlTypes.DATE.getIdentifier().equals(identifier)) { if (object instanceof Long) { /* base type: epoch days as Long */ return Value.createDateValue(((Long) object).intValue()); } else { return Value.createDateValue((int) ((LocalDate) object).toEpochDay()); } } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) { if (object instanceof Long) { /* base type: nano-of-day as Long */ return Value.createTimeValue( CivilTimeEncoder.encodePacked64TimeNanos(LocalTime.ofNanoOfDay((Long) object))); } else { return Value.createTimeValue(CivilTimeEncoder.encodePacked64TimeNanos((LocalTime) object)); } } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) { LocalDateTime datetime; if (object instanceof Row) { /* Row base-type representation: fields addressed via the logical type's declared field-name constants (see BEAM-10611). */ datetime = LocalDateTime.of( LocalDate.ofEpochDay(((Row) object).getInt64(DateTime.DATE_FIELD_NAME)), LocalTime.ofNanoOfDay(((Row) object).getInt64(DateTime.TIME_FIELD_NAME))); } else { datetime = (LocalDateTime) object; } return Value.createDatetimeValue( CivilTimeEncoder.encodePacked64DatetimeSeconds(datetime), datetime.getNano()); } else { throw new UnsupportedOperationException("Unknown Beam logical type: " + identifier); } }
/**
 * Utility methods for converting between Beam schema types/values and ZetaSQL types/values.
 *
 * <p>Conversion is bidirectional: Beam {@code FieldType}/{@code Row} to ZetaSQL
 * {@code Type}/{@code Value}, and back. The Beam SQL logical types DATE, TIME and DATETIME map
 * to the ZetaSQL civil-time types, while Beam's Joda-based DATETIME field type maps to the
 * ZetaSQL TIMESTAMP type.
 */
class ZetaSqlBeamTranslationUtils {

  // Joda Instant carries millisecond precision; ZetaSQL timestamps use microseconds.
  private static final long MICROS_PER_MILLI = 1000L;

  // Static utility class; not instantiable.
  private ZetaSqlBeamTranslationUtils() {}

  // ---- Beam type -> ZetaSQL type ----

  /**
   * Maps a Beam {@code FieldType} to the equivalent ZetaSQL {@code Type}.
   *
   * @throws UnsupportedOperationException for Beam type names with no ZetaSQL counterpart
   */
  public static Type beamFieldTypeToZetaSqlType(FieldType fieldType) {
    switch (fieldType.getTypeName()) {
      case INT64:
        return TypeFactory.createSimpleType(TypeKind.TYPE_INT64);
      case DOUBLE:
        return TypeFactory.createSimpleType(TypeKind.TYPE_DOUBLE);
      case BOOLEAN:
        return TypeFactory.createSimpleType(TypeKind.TYPE_BOOL);
      case STRING:
        return TypeFactory.createSimpleType(TypeKind.TYPE_STRING);
      case BYTES:
        return TypeFactory.createSimpleType(TypeKind.TYPE_BYTES);
      case DECIMAL:
        return TypeFactory.createSimpleType(TypeKind.TYPE_NUMERIC);
      case DATETIME:
        // Beam's Joda-based DATETIME is an absolute point in time -> ZetaSQL TIMESTAMP.
        return TypeFactory.createSimpleType(TypeKind.TYPE_TIMESTAMP);
      case ARRAY:
        return beamElementFieldTypeToZetaSqlArrayType(fieldType.getCollectionElementType());
      case ROW:
        return beamSchemaToZetaSqlStructType(fieldType.getRowSchema());
      case LOGICAL_TYPE:
        return beamLogicalTypeToZetaSqlType(fieldType.getLogicalType().getIdentifier());
      default:
        throw new UnsupportedOperationException(
            "Unknown Beam fieldType: " + fieldType.getTypeName());
    }
  }

  /** Wraps the converted element type in a ZetaSQL array type. */
  private static ArrayType beamElementFieldTypeToZetaSqlArrayType(FieldType elementFieldType) {
    return TypeFactory.createArrayType(beamFieldTypeToZetaSqlType(elementFieldType));
  }

  /** Converts a Beam {@code Schema} to a ZetaSQL struct type, field by field. */
  public static StructType beamSchemaToZetaSqlStructType(Schema schema) {
    return TypeFactory.createStructType(
        schema.getFields().stream()
            .map(ZetaSqlBeamTranslationUtils::beamFieldToZetaSqlStructField)
            .collect(Collectors.toList()));
  }

  private static StructField beamFieldToZetaSqlStructField(Field field) {
    return new StructField(field.getName(), beamFieldTypeToZetaSqlType(field.getType()));
  }

  /** Maps the Beam SQL logical types (DATE/TIME/DATETIME) to ZetaSQL civil-time types. */
  private static Type beamLogicalTypeToZetaSqlType(String identifier) {
    if (SqlTypes.DATE.getIdentifier().equals(identifier)) {
      return TypeFactory.createSimpleType(TypeKind.TYPE_DATE);
    } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) {
      return TypeFactory.createSimpleType(TypeKind.TYPE_TIME);
    } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) {
      return TypeFactory.createSimpleType(TypeKind.TYPE_DATETIME);
    } else {
      throw new UnsupportedOperationException("Unknown Beam logical type: " + identifier);
    }
  }

  // ---- Java object -> ZetaSQL Value ----

  /**
   * Converts a Java object carried in a Beam row to the equivalent ZetaSQL {@code Value}.
   * A null object becomes a typed ZetaSQL null.
   */
  public static Value javaObjectToZetaSqlValue(Object object, FieldType fieldType) {
    if (object == null) {
      return Value.createNullValue(beamFieldTypeToZetaSqlType(fieldType));
    }
    switch (fieldType.getTypeName()) {
      case INT64:
        return Value.createInt64Value((Long) object);
      case DOUBLE:
        return Value.createDoubleValue((Double) object);
      case BOOLEAN:
        return Value.createBoolValue((Boolean) object);
      case STRING:
        return Value.createStringValue((String) object);
      case BYTES:
        return Value.createBytesValue(ByteString.copyFrom((byte[]) object));
      case DECIMAL:
        return Value.createNumericValue((BigDecimal) object);
      case DATETIME:
        return jodaInstantToZetaSqlTimestampValue((Instant) object);
      case ARRAY:
        return javaListToZetaSqlArrayValue(
            (List<Object>) object, fieldType.getCollectionElementType());
      case ROW:
        return beamRowToZetaSqlStructValue((Row) object, fieldType.getRowSchema());
      case LOGICAL_TYPE:
        // beamLogicalObjectToZetaSqlValue is defined elsewhere in the full class.
        return beamLogicalObjectToZetaSqlValue(object, fieldType.getLogicalType().getIdentifier());
      default:
        throw new UnsupportedOperationException(
            "Unknown Beam fieldType: " + fieldType.getTypeName());
    }
  }

  // Joda Instant (millis) -> ZetaSQL timestamp (micros); checkedMultiply rejects
  // long overflow rather than wrapping.
  private static Value jodaInstantToZetaSqlTimestampValue(Instant instant) {
    return Value.createTimestampValueFromUnixMicros(
        LongMath.checkedMultiply(instant.getMillis(), MICROS_PER_MILLI));
  }

  private static Value javaListToZetaSqlArrayValue(List<Object> elements, FieldType elementType) {
    List<Value> values =
        elements.stream()
            .map(e -> javaObjectToZetaSqlValue(e, elementType))
            .collect(Collectors.toList());
    return Value.createArrayValue(beamElementFieldTypeToZetaSqlArrayType(elementType), values);
  }

  /** Converts a Beam {@code Row} to a ZetaSQL struct value, field by field in schema order. */
  public static Value beamRowToZetaSqlStructValue(Row row, Schema schema) {
    List<Value> values = new ArrayList<>(row.getFieldCount());
    for (int i = 0; i < row.getFieldCount(); i++) {
      values.add(
          javaObjectToZetaSqlValue(
              row.getBaseValue(i, Object.class), schema.getField(i).getType()));
    }
    return Value.createStructValue(beamSchemaToZetaSqlStructType(schema), values);
  }

  // ---- ZetaSQL type -> Beam type ----

  /**
   * Maps a ZetaSQL {@code Type} to the equivalent Beam {@code FieldType}. Every result is
   * marked nullable.
   */
  public static FieldType zetaSqlTypeToBeamFieldType(Type type) {
    switch (type.getKind()) {
      case TYPE_INT64:
        return FieldType.INT64.withNullable(true);
      case TYPE_DOUBLE:
        return FieldType.DOUBLE.withNullable(true);
      case TYPE_BOOL:
        return FieldType.BOOLEAN.withNullable(true);
      case TYPE_STRING:
        return FieldType.STRING.withNullable(true);
      case TYPE_BYTES:
        return FieldType.BYTES.withNullable(true);
      case TYPE_NUMERIC:
        return FieldType.DECIMAL.withNullable(true);
      case TYPE_DATE:
        return FieldType.logicalType(SqlTypes.DATE).withNullable(true);
      case TYPE_TIME:
        return FieldType.logicalType(SqlTypes.TIME).withNullable(true);
      case TYPE_DATETIME:
        return FieldType.logicalType(SqlTypes.DATETIME).withNullable(true);
      case TYPE_TIMESTAMP:
        // ZetaSQL TIMESTAMP (absolute instant) -> Beam's Joda-based DATETIME field type.
        return FieldType.DATETIME.withNullable(true);
      case TYPE_ARRAY:
        return zetaSqlElementTypeToBeamArrayType(type.asArray().getElementType());
      case TYPE_STRUCT:
        return zetaSqlStructTypeToBeamRowType(type.asStruct());
      default:
        throw new UnsupportedOperationException("Unknown ZetaSQL type: " + type.getKind());
    }
  }

  private static FieldType zetaSqlElementTypeToBeamArrayType(Type elementType) {
    return FieldType.array(zetaSqlTypeToBeamFieldType(elementType)).withNullable(true);
  }

  private static FieldType zetaSqlStructTypeToBeamRowType(StructType structType) {
    return FieldType.row(
            structType.getFieldList().stream()
                .map(ZetaSqlBeamTranslationUtils::zetaSqlStructFieldToBeamField)
                .collect(Schema.toSchema()))
        .withNullable(true);
  }

  private static Field zetaSqlStructFieldToBeamField(StructField structField) {
    return Field.of(structField.getName(), zetaSqlTypeToBeamFieldType(structField.getType()));
  }

  // ---- ZetaSQL Value -> Java object ----

  /** Converts a ZetaSQL value to a Java object, deriving the Beam field type from the value. */
  public static Object zetaSqlValueToJavaObject(Value value, boolean verifyValues) {
    return zetaSqlValueToJavaObject(
        value, zetaSqlTypeToBeamFieldType(value.getType()), verifyValues);
  }

  /**
   * Converts a ZetaSQL value to the Java object Beam expects for {@code fieldType}.
   *
   * @param verifyValues when true, struct values are rebuilt with per-field verification
   */
  public static Object zetaSqlValueToJavaObject(
      Value value, FieldType fieldType, boolean verifyValues) {
    if (value.isNull()) {
      return null;
    }
    switch (fieldType.getTypeName()) {
      case INT64:
        return value.getInt64Value();
      case DOUBLE:
        // An int64-typed value may be assigned to a double field; widen explicitly.
        if (value.getType().getKind().equals(TypeKind.TYPE_INT64)) {
          return (double) value.getInt64Value();
        }
        return value.getDoubleValue();
      case BOOLEAN:
        return value.getBoolValue();
      case STRING:
        return value.getStringValue();
      case BYTES:
        return value.getBytesValue().toByteArray();
      case DECIMAL:
        return value.getNumericValue();
      case DATETIME:
        return zetaSqlTimestampValueToJodaInstant(value);
      case ARRAY:
        return zetaSqlArrayValueToJavaList(
            value, fieldType.getCollectionElementType(), verifyValues);
      case ROW:
        return zetaSqlStructValueToBeamRow(value, fieldType.getRowSchema(), verifyValues);
      case LOGICAL_TYPE:
        return zetaSqlValueToBeamLogicalObject(value, fieldType.getLogicalType().getIdentifier());
      default:
        throw new UnsupportedOperationException(
            "Unknown Beam fieldType: " + fieldType.getTypeName());
    }
  }

  // ZetaSQL timestamp (micros) -> Joda Instant (millis); integer division drops
  // sub-millisecond precision.
  private static Instant zetaSqlTimestampValueToJodaInstant(Value timestampValue) {
    long millis = timestampValue.getTimestampUnixMicros() / MICROS_PER_MILLI;
    return Instant.ofEpochMilli(millis);
  }

  private static List<Object> zetaSqlArrayValueToJavaList(
      Value arrayValue, FieldType elementType, boolean verifyValues) {
    return arrayValue.getElementList().stream()
        .map(e -> zetaSqlValueToJavaObject(e, elementType, verifyValues))
        .collect(Collectors.toList());
  }

  /** Converts a ZetaSQL struct value to a Beam {@code Row} following {@code schema}. */
  public static Row zetaSqlStructValueToBeamRow(
      Value structValue, Schema schema, boolean verifyValues) {
    List<Object> objects = new ArrayList<>(schema.getFieldCount());
    List<Value> values = structValue.getFieldList();
    for (int i = 0; i < values.size(); i++) {
      objects.add(
          zetaSqlValueToJavaObject(values.get(i), schema.getField(i).getType(), verifyValues));
    }
    // addValues verifies each value against the schema; attachValues trusts the caller.
    Row row =
        verifyValues
            ? Row.withSchema(schema).addValues(objects).build()
            : Row.withSchema(schema).attachValues(objects);
    return row;
  }

  /** Decodes ZetaSQL civil-time values back into java.time objects for Beam logical types. */
  private static Object zetaSqlValueToBeamLogicalObject(Value value, String identifier) {
    if (SqlTypes.DATE.getIdentifier().equals(identifier)) {
      return LocalDate.ofEpochDay(value.getDateValue());
    } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) {
      return CivilTimeEncoder.decodePacked64TimeNanosAsJavaTime(value.getTimeValue());
    } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) {
      return CivilTimeEncoder.decodePacked96DatetimeNanosAsJavaTime(value.getDatetimeValue());
    } else {
      throw new UnsupportedOperationException("Unknown Beam logical type: " + identifier);
    }
  }
}
/**
 * Utility methods for converting between Beam schema types/values and ZetaSQL types/values.
 *
 * <p>Conversion runs in both directions: Beam {@code FieldType}/{@code Row} to ZetaSQL
 * {@code Type}/{@code Value}, and back. The Beam SQL logical types DATE, TIME and DATETIME map
 * to the ZetaSQL civil-time types; Beam's Joda-based DATETIME field type maps to the ZetaSQL
 * TIMESTAMP type.
 */
class ZetaSqlBeamTranslationUtils {

  // Joda Instant carries millisecond precision; ZetaSQL timestamps use microseconds.
  private static final long MICROS_PER_MILLI = 1000L;

  // Static utility class; not instantiable.
  private ZetaSqlBeamTranslationUtils() {}

  /**
   * Maps a Beam {@code FieldType} to the equivalent ZetaSQL {@code Type}.
   *
   * @throws UnsupportedOperationException for Beam type names with no ZetaSQL counterpart
   */
  public static Type beamFieldTypeToZetaSqlType(FieldType fieldType) {
    switch (fieldType.getTypeName()) {
      case INT64:
        return TypeFactory.createSimpleType(TypeKind.TYPE_INT64);
      case DOUBLE:
        return TypeFactory.createSimpleType(TypeKind.TYPE_DOUBLE);
      case BOOLEAN:
        return TypeFactory.createSimpleType(TypeKind.TYPE_BOOL);
      case STRING:
        return TypeFactory.createSimpleType(TypeKind.TYPE_STRING);
      case BYTES:
        return TypeFactory.createSimpleType(TypeKind.TYPE_BYTES);
      case DECIMAL:
        return TypeFactory.createSimpleType(TypeKind.TYPE_NUMERIC);
      case DATETIME:
        // Beam's Joda-based DATETIME is an absolute point in time -> ZetaSQL TIMESTAMP.
        return TypeFactory.createSimpleType(TypeKind.TYPE_TIMESTAMP);
      case ARRAY:
        return beamElementFieldTypeToZetaSqlArrayType(fieldType.getCollectionElementType());
      case ROW:
        return beamSchemaToZetaSqlStructType(fieldType.getRowSchema());
      case LOGICAL_TYPE:
        return beamLogicalTypeToZetaSqlType(fieldType.getLogicalType().getIdentifier());
      default:
        throw new UnsupportedOperationException(
            "Unknown Beam fieldType: " + fieldType.getTypeName());
    }
  }

  /** Wraps the converted element type in a ZetaSQL array type. */
  private static ArrayType beamElementFieldTypeToZetaSqlArrayType(FieldType elementFieldType) {
    return TypeFactory.createArrayType(beamFieldTypeToZetaSqlType(elementFieldType));
  }

  /** Converts a Beam {@code Schema} to a ZetaSQL struct type, field by field. */
  public static StructType beamSchemaToZetaSqlStructType(Schema schema) {
    return TypeFactory.createStructType(
        schema.getFields().stream()
            .map(ZetaSqlBeamTranslationUtils::beamFieldToZetaSqlStructField)
            .collect(Collectors.toList()));
  }

  private static StructField beamFieldToZetaSqlStructField(Field field) {
    return new StructField(field.getName(), beamFieldTypeToZetaSqlType(field.getType()));
  }

  /** Maps the Beam SQL logical types (DATE/TIME/DATETIME) to ZetaSQL civil-time types. */
  private static Type beamLogicalTypeToZetaSqlType(String identifier) {
    if (SqlTypes.DATE.getIdentifier().equals(identifier)) {
      return TypeFactory.createSimpleType(TypeKind.TYPE_DATE);
    } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) {
      return TypeFactory.createSimpleType(TypeKind.TYPE_TIME);
    } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) {
      return TypeFactory.createSimpleType(TypeKind.TYPE_DATETIME);
    } else {
      throw new UnsupportedOperationException("Unknown Beam logical type: " + identifier);
    }
  }

  /**
   * Converts a Java object carried in a Beam row to the equivalent ZetaSQL {@code Value}.
   * A null object becomes a typed ZetaSQL null.
   */
  public static Value javaObjectToZetaSqlValue(Object object, FieldType fieldType) {
    if (object == null) {
      return Value.createNullValue(beamFieldTypeToZetaSqlType(fieldType));
    }
    switch (fieldType.getTypeName()) {
      case INT64:
        return Value.createInt64Value((Long) object);
      case DOUBLE:
        return Value.createDoubleValue((Double) object);
      case BOOLEAN:
        return Value.createBoolValue((Boolean) object);
      case STRING:
        return Value.createStringValue((String) object);
      case BYTES:
        return Value.createBytesValue(ByteString.copyFrom((byte[]) object));
      case DECIMAL:
        return Value.createNumericValue((BigDecimal) object);
      case DATETIME:
        return jodaInstantToZetaSqlTimestampValue((Instant) object);
      case ARRAY:
        return javaListToZetaSqlArrayValue(
            (List<Object>) object, fieldType.getCollectionElementType());
      case ROW:
        return beamRowToZetaSqlStructValue((Row) object, fieldType.getRowSchema());
      case LOGICAL_TYPE:
        // beamLogicalObjectToZetaSqlValue is defined elsewhere in the full class.
        return beamLogicalObjectToZetaSqlValue(object, fieldType.getLogicalType().getIdentifier());
      default:
        throw new UnsupportedOperationException(
            "Unknown Beam fieldType: " + fieldType.getTypeName());
    }
  }

  // Joda Instant (millis) -> ZetaSQL timestamp (micros); checkedMultiply rejects
  // long overflow rather than wrapping.
  private static Value jodaInstantToZetaSqlTimestampValue(Instant instant) {
    return Value.createTimestampValueFromUnixMicros(
        LongMath.checkedMultiply(instant.getMillis(), MICROS_PER_MILLI));
  }

  private static Value javaListToZetaSqlArrayValue(List<Object> elements, FieldType elementType) {
    List<Value> values =
        elements.stream()
            .map(e -> javaObjectToZetaSqlValue(e, elementType))
            .collect(Collectors.toList());
    return Value.createArrayValue(beamElementFieldTypeToZetaSqlArrayType(elementType), values);
  }

  /** Converts a Beam {@code Row} to a ZetaSQL struct value, field by field in schema order. */
  public static Value beamRowToZetaSqlStructValue(Row row, Schema schema) {
    List<Value> values = new ArrayList<>(row.getFieldCount());
    for (int i = 0; i < row.getFieldCount(); i++) {
      values.add(
          javaObjectToZetaSqlValue(
              row.getBaseValue(i, Object.class), schema.getField(i).getType()));
    }
    return Value.createStructValue(beamSchemaToZetaSqlStructType(schema), values);
  }

  /**
   * Maps a ZetaSQL {@code Type} to the equivalent Beam {@code FieldType}. Every result is
   * marked nullable.
   */
  public static FieldType zetaSqlTypeToBeamFieldType(Type type) {
    switch (type.getKind()) {
      case TYPE_INT64:
        return FieldType.INT64.withNullable(true);
      case TYPE_DOUBLE:
        return FieldType.DOUBLE.withNullable(true);
      case TYPE_BOOL:
        return FieldType.BOOLEAN.withNullable(true);
      case TYPE_STRING:
        return FieldType.STRING.withNullable(true);
      case TYPE_BYTES:
        return FieldType.BYTES.withNullable(true);
      case TYPE_NUMERIC:
        return FieldType.DECIMAL.withNullable(true);
      case TYPE_DATE:
        return FieldType.logicalType(SqlTypes.DATE).withNullable(true);
      case TYPE_TIME:
        return FieldType.logicalType(SqlTypes.TIME).withNullable(true);
      case TYPE_DATETIME:
        return FieldType.logicalType(SqlTypes.DATETIME).withNullable(true);
      case TYPE_TIMESTAMP:
        // ZetaSQL TIMESTAMP (absolute instant) -> Beam's Joda-based DATETIME field type.
        return FieldType.DATETIME.withNullable(true);
      case TYPE_ARRAY:
        return zetaSqlElementTypeToBeamArrayType(type.asArray().getElementType());
      case TYPE_STRUCT:
        return zetaSqlStructTypeToBeamRowType(type.asStruct());
      default:
        throw new UnsupportedOperationException("Unknown ZetaSQL type: " + type.getKind());
    }
  }

  private static FieldType zetaSqlElementTypeToBeamArrayType(Type elementType) {
    return FieldType.array(zetaSqlTypeToBeamFieldType(elementType)).withNullable(true);
  }

  private static FieldType zetaSqlStructTypeToBeamRowType(StructType structType) {
    return FieldType.row(
            structType.getFieldList().stream()
                .map(ZetaSqlBeamTranslationUtils::zetaSqlStructFieldToBeamField)
                .collect(Schema.toSchema()))
        .withNullable(true);
  }

  private static Field zetaSqlStructFieldToBeamField(StructField structField) {
    return Field.of(structField.getName(), zetaSqlTypeToBeamFieldType(structField.getType()));
  }

  /** Converts a ZetaSQL value to a Java object, deriving the Beam field type from the value. */
  public static Object zetaSqlValueToJavaObject(Value value, boolean verifyValues) {
    return zetaSqlValueToJavaObject(
        value, zetaSqlTypeToBeamFieldType(value.getType()), verifyValues);
  }

  /**
   * Converts a ZetaSQL value to the Java object Beam expects for {@code fieldType}.
   *
   * @param verifyValues when true, struct values are rebuilt with per-field verification
   */
  public static Object zetaSqlValueToJavaObject(
      Value value, FieldType fieldType, boolean verifyValues) {
    if (value.isNull()) {
      return null;
    }
    switch (fieldType.getTypeName()) {
      case INT64:
        return value.getInt64Value();
      case DOUBLE:
        // An int64-typed value may be assigned to a double field; widen explicitly.
        if (value.getType().getKind().equals(TypeKind.TYPE_INT64)) {
          return (double) value.getInt64Value();
        }
        return value.getDoubleValue();
      case BOOLEAN:
        return value.getBoolValue();
      case STRING:
        return value.getStringValue();
      case BYTES:
        return value.getBytesValue().toByteArray();
      case DECIMAL:
        return value.getNumericValue();
      case DATETIME:
        return zetaSqlTimestampValueToJodaInstant(value);
      case ARRAY:
        return zetaSqlArrayValueToJavaList(
            value, fieldType.getCollectionElementType(), verifyValues);
      case ROW:
        return zetaSqlStructValueToBeamRow(value, fieldType.getRowSchema(), verifyValues);
      case LOGICAL_TYPE:
        return zetaSqlValueToBeamLogicalObject(value, fieldType.getLogicalType().getIdentifier());
      default:
        throw new UnsupportedOperationException(
            "Unknown Beam fieldType: " + fieldType.getTypeName());
    }
  }

  // ZetaSQL timestamp (micros) -> Joda Instant (millis); integer division drops
  // sub-millisecond precision.
  private static Instant zetaSqlTimestampValueToJodaInstant(Value timestampValue) {
    long millis = timestampValue.getTimestampUnixMicros() / MICROS_PER_MILLI;
    return Instant.ofEpochMilli(millis);
  }

  private static List<Object> zetaSqlArrayValueToJavaList(
      Value arrayValue, FieldType elementType, boolean verifyValues) {
    return arrayValue.getElementList().stream()
        .map(e -> zetaSqlValueToJavaObject(e, elementType, verifyValues))
        .collect(Collectors.toList());
  }

  /** Converts a ZetaSQL struct value to a Beam {@code Row} following {@code schema}. */
  public static Row zetaSqlStructValueToBeamRow(
      Value structValue, Schema schema, boolean verifyValues) {
    List<Object> objects = new ArrayList<>(schema.getFieldCount());
    List<Value> values = structValue.getFieldList();
    for (int i = 0; i < values.size(); i++) {
      objects.add(
          zetaSqlValueToJavaObject(values.get(i), schema.getField(i).getType(), verifyValues));
    }
    // addValues verifies each value against the schema; attachValues trusts the caller.
    Row row =
        verifyValues
            ? Row.withSchema(schema).addValues(objects).build()
            : Row.withSchema(schema).attachValues(objects);
    return row;
  }

  /** Decodes ZetaSQL civil-time values back into java.time objects for Beam logical types. */
  private static Object zetaSqlValueToBeamLogicalObject(Value value, String identifier) {
    if (SqlTypes.DATE.getIdentifier().equals(identifier)) {
      return LocalDate.ofEpochDay(value.getDateValue());
    } else if (SqlTypes.TIME.getIdentifier().equals(identifier)) {
      return CivilTimeEncoder.decodePacked64TimeNanosAsJavaTime(value.getTimeValue());
    } else if (SqlTypes.DATETIME.getIdentifier().equals(identifier)) {
      return CivilTimeEncoder.decodePacked96DatetimeNanosAsJavaTime(value.getDatetimeValue());
    } else {
      throw new UnsupportedOperationException("Unknown Beam logical type: " + identifier);
    }
  }
}
@glefloch can you think of a trick to make `quarkusGenerateCodeDev` run only when the intention is to launch dev mode? Perhaps the task could be disabled by default and enabled by `quarkusDev` somehow?
/**
 * Registers all Quarkus Gradle tasks and wires their dependencies and ordering.
 *
 * <p>Java-plugin-specific wiring (compile ordering, source sets, configurations) happens
 * lazily inside the {@code withType(JavaPlugin.class)} callback; Kotlin compile tasks are
 * wired in the {@code org.jetbrains.kotlin.jvm} plugin callback.
 *
 * @param project the Gradle project being configured
 * @param quarkusExt the quarkus extension, used to prepare each Test task before it runs
 */
private void registerTasks(Project project, QuarkusPluginExtension quarkusExt) {
  TaskContainer tasks = project.getTasks();

  // Extension/platform management tasks.
  tasks.create(LIST_EXTENSIONS_TASK_NAME, QuarkusListExtensions.class);
  tasks.create(LIST_CATEGORIES_TASK_NAME, QuarkusListCategories.class);
  tasks.create(LIST_PLATFORMS_TASK_NAME, QuarkusListPlatforms.class);
  tasks.create(ADD_EXTENSION_TASK_NAME, QuarkusAddExtension.class);
  tasks.create(REMOVE_EXTENSION_TASK_NAME, QuarkusRemoveExtension.class);

  // Three code-generation variants: regular build, dev mode, and tests.
  QuarkusGenerateCode quarkusGenerateCode = tasks.create(QUARKUS_GENERATE_CODE_TASK_NAME, QuarkusGenerateCode.class);
  QuarkusGenerateCode quarkusGenerateCodeDev = tasks.create(QUARKUS_GENERATE_CODE_DEV_TASK_NAME, QuarkusGenerateCode.class);
  quarkusGenerateCodeDev.setDevMode(true);
  QuarkusGenerateCode quarkusGenerateCodeTests = tasks.create(QUARKUS_GENERATE_CODE_TESTS_TASK_NAME, QuarkusGenerateCode.class);
  quarkusGenerateCodeTests.setTest(true);

  Task quarkusBuild = tasks.create(QUARKUS_BUILD_TASK_NAME, QuarkusBuild.class);
  quarkusBuild.dependsOn(quarkusGenerateCode);
  QuarkusDev quarkusDev = tasks.create(QUARKUS_DEV_TASK_NAME, QuarkusDev.class);
  Task quarkusRemoteDev = tasks.create(QUARKUS_REMOTE_DEV_TASK_NAME, QuarkusRemoteDev.class);
  Task quarkusTest = tasks.create(QUARKUS_TEST_TASK_NAME, QuarkusTest.class);
  tasks.create(QUARKUS_TEST_CONFIG_TASK_NAME, QuarkusTestConfig.class);

  // Deprecated alias; warns and delegates to quarkusBuild.
  Task buildNative = tasks.create(BUILD_NATIVE_TASK_NAME, DefaultTask.class);
  buildNative.finalizedBy(quarkusBuild);
  buildNative.doFirst(t -> project.getLogger()
      .warn("The 'buildNative' task has been deprecated in favor of 'build -Dquarkus.package.type=native'"));
  configureBuildNativeTask(project);

  // Applied to every Test task: let the extension prepare the test before execution,
  // use the JUnit platform, and order quarkusBuild after tests when both run.
  final Consumer<Test> configureTestTask = t -> {
    t.doFirst(new Action<Task>() {
      @Override
      public void execute(Task test) {
        quarkusExt.beforeTest(t);
      }
    });
    t.useJUnitPlatform();
    quarkusBuild.shouldRunAfter(t);
  };

  project.getPlugins().withType(
      BasePlugin.class,
      basePlugin -> tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(quarkusBuild));
  project.getPlugins().withType(
      JavaPlugin.class,
      javaPlugin -> {
        project.afterEvaluate(this::afterEvaluate);
        ConfigurationContainer configurations = project.getConfigurations();

        // Ordering only (mustRunAfter, not dependsOn) so compileJava does not force
        // the generate tasks onto every build.
        JavaCompile compileJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME);
        compileJavaTask.mustRunAfter(quarkusGenerateCodeDev);
        compileJavaTask.mustRunAfter(quarkusGenerateCode);
        JavaCompile compileTestJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_TEST_JAVA_TASK_NAME);
        compileTestJavaTask.dependsOn(quarkusGenerateCodeTests);

        Task classesTask = tasks.getByName(JavaPlugin.CLASSES_TASK_NAME);
        Task resourcesTask = tasks.getByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME);
        Task testClassesTask = tasks.getByName(JavaPlugin.TEST_CLASSES_TASK_NAME);
        Task testResourcesTask = tasks.getByName(JavaPlugin.PROCESS_TEST_RESOURCES_TASK_NAME);
        quarkusGenerateCode.dependsOn(resourcesTask);
        quarkusGenerateCodeDev.dependsOn(resourcesTask);
        quarkusGenerateCodeTests.dependsOn(resourcesTask);
        quarkusDev.dependsOn(classesTask, resourcesTask, testClassesTask, testResourcesTask,
            quarkusGenerateCodeDev, quarkusGenerateCodeTests);
        quarkusRemoteDev.dependsOn(classesTask, resourcesTask);
        quarkusTest.dependsOn(classesTask, resourcesTask, testClassesTask, testResourcesTask,
            quarkusGenerateCode, quarkusGenerateCodeTests);
        quarkusBuild.dependsOn(classesTask, resourcesTask, tasks.getByName(JavaPlugin.JAR_TASK_NAME));

        SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class)
            .getSourceSets();
        SourceSet nativeTestSourceSet = sourceSets.create(NATIVE_TEST_SOURCE_SET_NAME);
        SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
        SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME);
        quarkusGenerateCode.setSourcesDirectories(getSourcesParents(mainSourceSet));
        quarkusGenerateCodeDev.setSourcesDirectories(getSourcesParents(mainSourceSet));
        quarkusGenerateCodeTests.setSourcesDirectories(getSourcesParents(testSourceSet));

        // The native-test source set sees both main and test outputs.
        nativeTestSourceSet.setCompileClasspath(
            nativeTestSourceSet.getCompileClasspath()
                .plus(mainSourceSet.getOutput())
                .plus(testSourceSet.getOutput()));
        nativeTestSourceSet.setRuntimeClasspath(
            nativeTestSourceSet.getRuntimeClasspath()
                .plus(mainSourceSet.getOutput())
                .plus(testSourceSet.getOutput()));

        configurations.create(DEV_MODE_CONFIGURATION_NAME);
        configurations.maybeCreate(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME)
            .extendsFrom(configurations.findByName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME));
        configurations.maybeCreate(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME)
            .extendsFrom(configurations.findByName(JavaPlugin.TEST_RUNTIME_ONLY_CONFIGURATION_NAME));

        Task testNative = tasks.create(TEST_NATIVE_TASK_NAME, QuarkusTestNative.class);
        testNative.dependsOn(quarkusBuild);
        testNative.setShouldRunAfter(Collections.singletonList(tasks.findByName(JavaPlugin.TEST_TASK_NAME)));

        // Configure existing Test tasks now and any Test task added later.
        tasks.withType(Test.class).forEach(configureTestTask);
        tasks.withType(Test.class).whenTaskAdded(configureTestTask::accept);

        // Expose per-provider generated source directories to the main/test source sets.
        SourceSet generatedSourceSet = sourceSets.create(QuarkusGenerateCode.QUARKUS_GENERATED_SOURCES);
        SourceSet generatedTestSourceSet = sourceSets.create(QuarkusGenerateCode.QUARKUS_TEST_GENERATED_SOURCES);
        for (String provider : QuarkusGenerateCode.CODE_GENERATION_PROVIDER) {
          mainSourceSet.getJava().srcDir(
              new File(generatedSourceSet.getJava().getClassesDirectory().get().getAsFile(), provider));
          testSourceSet.getJava().srcDir(
              new File(generatedTestSourceSet.getJava().getClassesDirectory().get().getAsFile(), provider));
        }

        // When io.quarkus:quarkus-panache-common lands on the compile classpath, add its
        // annotation processor with the same requested coordinates.
        configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)
            .getResolutionStrategy().eachDependency(d -> {
              if ("quarkus-panache-common".equals(d.getTarget().getName())
                  && "io.quarkus".equals(d.getTarget().getGroup())) {
                project.getDependencies().add("annotationProcessor",
                    d.getRequested().getGroup() + ":" + d.getRequested().getName() + ":"
                        + d.getRequested().getVersion());
              }
            });
      });

  project.getPlugins().withId("org.jetbrains.kotlin.jvm", plugin -> {
    quarkusDev.shouldPropagateJavaCompilerArgs(false);
    tasks.getByName("compileKotlin").mustRunAfter(quarkusGenerateCode);
    tasks.getByName("compileKotlin").mustRunAfter(quarkusGenerateCodeDev);
    tasks.getByName("compileTestKotlin").dependsOn(quarkusGenerateCodeTests);
  });
}
compileJavaTask.mustRunAfter(quarkusGenerateCode);
/**
 * Registers every Quarkus Gradle task and wires task dependencies and ordering.
 *
 * <p>Wiring that requires the Java plugin (compile ordering, source sets, configurations) is
 * deferred to the {@code withType(JavaPlugin.class)} callback; Kotlin compile tasks are wired
 * in the {@code org.jetbrains.kotlin.jvm} plugin callback.
 *
 * @param project the Gradle project being configured
 * @param quarkusExt the quarkus extension, used to prepare each Test task before it runs
 */
private void registerTasks(Project project, QuarkusPluginExtension quarkusExt) {
  TaskContainer tasks = project.getTasks();

  // Extension/platform management tasks.
  tasks.create(LIST_EXTENSIONS_TASK_NAME, QuarkusListExtensions.class);
  tasks.create(LIST_CATEGORIES_TASK_NAME, QuarkusListCategories.class);
  tasks.create(LIST_PLATFORMS_TASK_NAME, QuarkusListPlatforms.class);
  tasks.create(ADD_EXTENSION_TASK_NAME, QuarkusAddExtension.class);
  tasks.create(REMOVE_EXTENSION_TASK_NAME, QuarkusRemoveExtension.class);

  // Code generation comes in three flavors: regular build, dev mode, and tests.
  QuarkusGenerateCode quarkusGenerateCode = tasks.create(QUARKUS_GENERATE_CODE_TASK_NAME, QuarkusGenerateCode.class);
  QuarkusGenerateCode quarkusGenerateCodeDev = tasks.create(QUARKUS_GENERATE_CODE_DEV_TASK_NAME, QuarkusGenerateCode.class);
  quarkusGenerateCodeDev.setDevMode(true);
  QuarkusGenerateCode quarkusGenerateCodeTests = tasks.create(QUARKUS_GENERATE_CODE_TESTS_TASK_NAME, QuarkusGenerateCode.class);
  quarkusGenerateCodeTests.setTest(true);

  Task quarkusBuild = tasks.create(QUARKUS_BUILD_TASK_NAME, QuarkusBuild.class);
  quarkusBuild.dependsOn(quarkusGenerateCode);
  QuarkusDev quarkusDev = tasks.create(QUARKUS_DEV_TASK_NAME, QuarkusDev.class);
  Task quarkusRemoteDev = tasks.create(QUARKUS_REMOTE_DEV_TASK_NAME, QuarkusRemoteDev.class);
  Task quarkusTest = tasks.create(QUARKUS_TEST_TASK_NAME, QuarkusTest.class);
  tasks.create(QUARKUS_TEST_CONFIG_TASK_NAME, QuarkusTestConfig.class);

  // Deprecated alias; warns and delegates to quarkusBuild.
  Task buildNative = tasks.create(BUILD_NATIVE_TASK_NAME, DefaultTask.class);
  buildNative.finalizedBy(quarkusBuild);
  buildNative.doFirst(t -> project.getLogger()
      .warn("The 'buildNative' task has been deprecated in favor of 'build -Dquarkus.package.type=native'"));
  configureBuildNativeTask(project);

  // Applied to every Test task: prepare the test via the extension, use the JUnit
  // platform, and order quarkusBuild after tests when both run.
  final Consumer<Test> configureTestTask = t -> {
    t.doFirst(new Action<Task>() {
      @Override
      public void execute(Task test) {
        quarkusExt.beforeTest(t);
      }
    });
    t.useJUnitPlatform();
    quarkusBuild.shouldRunAfter(t);
  };

  project.getPlugins().withType(
      BasePlugin.class,
      basePlugin -> tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(quarkusBuild));
  project.getPlugins().withType(
      JavaPlugin.class,
      javaPlugin -> {
        project.afterEvaluate(this::afterEvaluate);
        ConfigurationContainer configurations = project.getConfigurations();

        // Ordering only (mustRunAfter, not dependsOn) so compileJava does not force
        // the generate tasks onto every build.
        JavaCompile compileJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_JAVA_TASK_NAME);
        compileJavaTask.mustRunAfter(quarkusGenerateCodeDev);
        compileJavaTask.mustRunAfter(quarkusGenerateCode);
        JavaCompile compileTestJavaTask = (JavaCompile) tasks.getByName(JavaPlugin.COMPILE_TEST_JAVA_TASK_NAME);
        compileTestJavaTask.dependsOn(quarkusGenerateCodeTests);

        Task classesTask = tasks.getByName(JavaPlugin.CLASSES_TASK_NAME);
        Task resourcesTask = tasks.getByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME);
        Task testClassesTask = tasks.getByName(JavaPlugin.TEST_CLASSES_TASK_NAME);
        Task testResourcesTask = tasks.getByName(JavaPlugin.PROCESS_TEST_RESOURCES_TASK_NAME);
        quarkusGenerateCode.dependsOn(resourcesTask);
        quarkusGenerateCodeDev.dependsOn(resourcesTask);
        quarkusGenerateCodeTests.dependsOn(resourcesTask);
        quarkusDev.dependsOn(classesTask, resourcesTask, testClassesTask, testResourcesTask,
            quarkusGenerateCodeDev, quarkusGenerateCodeTests);
        quarkusRemoteDev.dependsOn(classesTask, resourcesTask);
        quarkusTest.dependsOn(classesTask, resourcesTask, testClassesTask, testResourcesTask,
            quarkusGenerateCode, quarkusGenerateCodeTests);
        quarkusBuild.dependsOn(classesTask, resourcesTask, tasks.getByName(JavaPlugin.JAR_TASK_NAME));

        SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class)
            .getSourceSets();
        SourceSet nativeTestSourceSet = sourceSets.create(NATIVE_TEST_SOURCE_SET_NAME);
        SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
        SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME);
        quarkusGenerateCode.setSourcesDirectories(getSourcesParents(mainSourceSet));
        quarkusGenerateCodeDev.setSourcesDirectories(getSourcesParents(mainSourceSet));
        quarkusGenerateCodeTests.setSourcesDirectories(getSourcesParents(testSourceSet));

        // The native-test source set sees both main and test outputs.
        nativeTestSourceSet.setCompileClasspath(
            nativeTestSourceSet.getCompileClasspath()
                .plus(mainSourceSet.getOutput())
                .plus(testSourceSet.getOutput()));
        nativeTestSourceSet.setRuntimeClasspath(
            nativeTestSourceSet.getRuntimeClasspath()
                .plus(mainSourceSet.getOutput())
                .plus(testSourceSet.getOutput()));

        configurations.create(DEV_MODE_CONFIGURATION_NAME);
        configurations.maybeCreate(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME)
            .extendsFrom(configurations.findByName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME));
        configurations.maybeCreate(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME)
            .extendsFrom(configurations.findByName(JavaPlugin.TEST_RUNTIME_ONLY_CONFIGURATION_NAME));

        Task testNative = tasks.create(TEST_NATIVE_TASK_NAME, QuarkusTestNative.class);
        testNative.dependsOn(quarkusBuild);
        testNative.setShouldRunAfter(Collections.singletonList(tasks.findByName(JavaPlugin.TEST_TASK_NAME)));

        // Configure existing Test tasks now and any Test task added later.
        tasks.withType(Test.class).forEach(configureTestTask);
        tasks.withType(Test.class).whenTaskAdded(configureTestTask::accept);

        // Expose per-provider generated source directories to the main/test source sets.
        SourceSet generatedSourceSet = sourceSets.create(QuarkusGenerateCode.QUARKUS_GENERATED_SOURCES);
        SourceSet generatedTestSourceSet = sourceSets.create(QuarkusGenerateCode.QUARKUS_TEST_GENERATED_SOURCES);
        for (String provider : QuarkusGenerateCode.CODE_GENERATION_PROVIDER) {
          mainSourceSet.getJava().srcDir(
              new File(generatedSourceSet.getJava().getClassesDirectory().get().getAsFile(), provider));
          testSourceSet.getJava().srcDir(
              new File(generatedTestSourceSet.getJava().getClassesDirectory().get().getAsFile(), provider));
        }

        // When io.quarkus:quarkus-panache-common lands on the compile classpath, add its
        // annotation processor with the same requested coordinates.
        configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)
            .getResolutionStrategy().eachDependency(d -> {
              if ("quarkus-panache-common".equals(d.getTarget().getName())
                  && "io.quarkus".equals(d.getTarget().getGroup())) {
                project.getDependencies().add("annotationProcessor",
                    d.getRequested().getGroup() + ":" + d.getRequested().getName() + ":"
                        + d.getRequested().getVersion());
              }
            });
      });

  project.getPlugins().withId("org.jetbrains.kotlin.jvm", plugin -> {
    quarkusDev.shouldPropagateJavaCompilerArgs(false);
    tasks.getByName("compileKotlin").mustRunAfter(quarkusGenerateCode);
    tasks.getByName("compileKotlin").mustRunAfter(quarkusGenerateCodeDev);
    tasks.getByName("compileTestKotlin").dependsOn(quarkusGenerateCodeTests);
  });
}
class QuarkusPlugin implements Plugin<Project> { public static final String ID = "io.quarkus"; public static final String QUARKUS_PACKAGE_TYPE = "quarkus.package.type"; public static final String EXTENSION_NAME = "quarkus"; public static final String LIST_EXTENSIONS_TASK_NAME = "listExtensions"; public static final String LIST_CATEGORIES_TASK_NAME = "listCategories"; public static final String LIST_PLATFORMS_TASK_NAME = "listPlatforms"; public static final String ADD_EXTENSION_TASK_NAME = "addExtension"; public static final String REMOVE_EXTENSION_TASK_NAME = "removeExtension"; public static final String QUARKUS_GENERATE_CODE_TASK_NAME = "quarkusGenerateCode"; public static final String QUARKUS_GENERATE_CODE_DEV_TASK_NAME = "quarkusGenerateCodeDev"; public static final String QUARKUS_GENERATE_CODE_TESTS_TASK_NAME = "quarkusGenerateCodeTests"; public static final String QUARKUS_BUILD_TASK_NAME = "quarkusBuild"; public static final String QUARKUS_DEV_TASK_NAME = "quarkusDev"; public static final String QUARKUS_REMOTE_DEV_TASK_NAME = "quarkusRemoteDev"; public static final String QUARKUS_TEST_TASK_NAME = "quarkusTest"; public static final String DEV_MODE_CONFIGURATION_NAME = "quarkusDev"; @Deprecated public static final String BUILD_NATIVE_TASK_NAME = "buildNative"; public static final String TEST_NATIVE_TASK_NAME = "testNative"; @Deprecated public static final String QUARKUS_TEST_CONFIG_TASK_NAME = "quarkusTestConfig"; public static final String NATIVE_TEST_SOURCE_SET_NAME = "native-test"; public static final String NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME = "nativeTestImplementation"; public static final String NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME = "nativeTestRuntimeOnly"; private final ToolingModelBuilderRegistry registry; @Inject public QuarkusPlugin(ToolingModelBuilderRegistry registry) { this.registry = registry; } @Override public void apply(Project project) { verifyGradleVersion(); registerModel(); final QuarkusPluginExtension quarkusExt = 
project.getExtensions().create(EXTENSION_NAME, QuarkusPluginExtension.class, project); registerTasks(project, quarkusExt); } private void registerConditionalDependencies(Project project) { ConditionalDependenciesEnabler conditionalDependenciesEnabler = new ConditionalDependenciesEnabler(project); ApplicationDeploymentClasspathBuilder deploymentClasspathBuilder = new ApplicationDeploymentClasspathBuilder( project); project.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME).getIncoming() .beforeResolve((dependencies) -> { Set<ExtensionDependency> implementationExtensions = conditionalDependenciesEnabler .declareConditionalDependencies(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); deploymentClasspathBuilder.createBuildClasspath(implementationExtensions, JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); }); project.getConfigurations().getByName(DEV_MODE_CONFIGURATION_NAME).getIncoming().beforeResolve((devDependencies) -> { Set<ExtensionDependency> devModeExtensions = conditionalDependenciesEnabler .declareConditionalDependencies(DEV_MODE_CONFIGURATION_NAME); deploymentClasspathBuilder.createBuildClasspath(devModeExtensions, DEV_MODE_CONFIGURATION_NAME); }); project.getConfigurations().getByName(JavaPlugin.TEST_COMPILE_CLASSPATH_CONFIGURATION_NAME).getIncoming() .beforeResolve((testDependencies) -> { Set<ExtensionDependency> testExtensions = conditionalDependenciesEnabler .declareConditionalDependencies(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME); deploymentClasspathBuilder.createBuildClasspath(testExtensions, JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME); }); } private Set<Path> getSourcesParents(SourceSet mainSourceSet) { Set<File> srcDirs = mainSourceSet.getJava().getSrcDirs(); return srcDirs.stream() .map(File::toPath) .map(Path::getParent) .collect(Collectors.toSet()); } private void registerModel() { registry.register(new GradleApplicationModelBuilder()); } private void verifyGradleVersion() { if 
(GradleVersion.current().compareTo(GradleVersion.version("6.1")) < 0) { throw new GradleException("Quarkus plugin requires Gradle 6.1 or later. Current version is: " + GradleVersion.current()); } } private void configureBuildNativeTask(Project project) { project.getGradle().getTaskGraph().whenReady(taskGraph -> { if (taskGraph.hasTask(project.getPath() + BUILD_NATIVE_TASK_NAME) || taskGraph.hasTask(project.getPath() + TEST_NATIVE_TASK_NAME)) { project.getExtensions().getExtraProperties() .set(QUARKUS_PACKAGE_TYPE, "native"); } }); } private void afterEvaluate(Project project) { registerConditionalDependencies(project); final HashSet<String> visited = new HashSet<>(); ConfigurationContainer configurations = project.getConfigurations(); configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME) .getIncoming().getDependencies() .forEach(d -> { if (d instanceof ProjectDependency) { visitProjectDep(project, ((ProjectDependency) d).getDependencyProject(), visited); } }); SourceSetExtension sourceSetExtension = project.getExtensions().getByType(QuarkusPluginExtension.class) .sourceSetExtension(); if (sourceSetExtension.extraNativeTest() != null) { SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class) .getSourceSets(); SourceSet nativeTestSourceSets = sourceSets.getByName(NATIVE_TEST_SOURCE_SET_NAME); nativeTestSourceSets.setCompileClasspath( nativeTestSourceSets.getCompileClasspath().plus(sourceSetExtension.extraNativeTest().getOutput())); nativeTestSourceSets.setRuntimeClasspath( nativeTestSourceSets.getRuntimeClasspath().plus(sourceSetExtension.extraNativeTest().getOutput())); configurations.findByName(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME).extendsFrom( configurations.findByName(sourceSetExtension.extraNativeTest().getImplementationConfigurationName())); configurations.findByName(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME).extendsFrom( 
configurations.findByName(sourceSetExtension.extraNativeTest().getRuntimeOnlyConfigurationName())); QuarkusTestNative nativeTest = (QuarkusTestNative) project.getTasks().getByName(TEST_NATIVE_TASK_NAME); nativeTest.setTestClassesDirs(nativeTestSourceSets.getOutput().getClassesDirs()); nativeTest.setClasspath(nativeTestSourceSets.getRuntimeClasspath()); } } private void visitProjectDep(Project project, Project dep, Set<String> visited) { if (dep.getState().getExecuted()) { setupQuarkusBuildTaskDeps(project, dep, visited); } else { dep.afterEvaluate(p -> { setupQuarkusBuildTaskDeps(project, p, visited); }); } } private void setupQuarkusBuildTaskDeps(Project project, Project dep, Set<String> visited) { if (!visited.add(dep.getPath())) { return; } project.getLogger().debug("Configuring {} task dependencies on {} tasks", project, dep); final Task quarkusBuild = project.getTasks().findByName(QUARKUS_BUILD_TASK_NAME); if (quarkusBuild != null) { final Task jarTask = dep.getTasks().findByName(JavaPlugin.JAR_TASK_NAME); if (jarTask != null) { final Task quarkusPrepare = project.getTasks().findByName(QUARKUS_GENERATE_CODE_TASK_NAME); final Task quarkusPrepareTests = project.getTasks().findByName(QUARKUS_GENERATE_CODE_TESTS_TASK_NAME); quarkusBuild.dependsOn(jarTask); if (quarkusPrepare != null) { quarkusPrepare.dependsOn(jarTask); } if (quarkusPrepareTests != null) { quarkusPrepareTests.dependsOn(jarTask); } } } final Task quarkusDev = project.getTasks().findByName(QUARKUS_DEV_TASK_NAME); if (quarkusDev != null) { final Task resourcesTask = dep.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME); if (resourcesTask != null) { quarkusDev.dependsOn(resourcesTask); } final Task resourcesTaskJandex = dep.getTasks().findByName("jandex"); if (resourcesTaskJandex != null) { quarkusDev.dependsOn(resourcesTaskJandex); } } final Configuration compileConfig = dep.getConfigurations().findByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME); if (compileConfig != null) { 
compileConfig.getIncoming().getDependencies() .forEach(d -> { if (d instanceof ProjectDependency) { visitProjectDep(project, ((ProjectDependency) d).getDependencyProject(), visited); } }); } } }
class QuarkusPlugin implements Plugin<Project> { public static final String ID = "io.quarkus"; public static final String QUARKUS_PACKAGE_TYPE = "quarkus.package.type"; public static final String EXTENSION_NAME = "quarkus"; public static final String LIST_EXTENSIONS_TASK_NAME = "listExtensions"; public static final String LIST_CATEGORIES_TASK_NAME = "listCategories"; public static final String LIST_PLATFORMS_TASK_NAME = "listPlatforms"; public static final String ADD_EXTENSION_TASK_NAME = "addExtension"; public static final String REMOVE_EXTENSION_TASK_NAME = "removeExtension"; public static final String QUARKUS_GENERATE_CODE_TASK_NAME = "quarkusGenerateCode"; public static final String QUARKUS_GENERATE_CODE_DEV_TASK_NAME = "quarkusGenerateCodeDev"; public static final String QUARKUS_GENERATE_CODE_TESTS_TASK_NAME = "quarkusGenerateCodeTests"; public static final String QUARKUS_BUILD_TASK_NAME = "quarkusBuild"; public static final String QUARKUS_DEV_TASK_NAME = "quarkusDev"; public static final String QUARKUS_REMOTE_DEV_TASK_NAME = "quarkusRemoteDev"; public static final String QUARKUS_TEST_TASK_NAME = "quarkusTest"; public static final String DEV_MODE_CONFIGURATION_NAME = "quarkusDev"; @Deprecated public static final String BUILD_NATIVE_TASK_NAME = "buildNative"; public static final String TEST_NATIVE_TASK_NAME = "testNative"; @Deprecated public static final String QUARKUS_TEST_CONFIG_TASK_NAME = "quarkusTestConfig"; public static final String NATIVE_TEST_SOURCE_SET_NAME = "native-test"; public static final String NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME = "nativeTestImplementation"; public static final String NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME = "nativeTestRuntimeOnly"; private final ToolingModelBuilderRegistry registry; @Inject public QuarkusPlugin(ToolingModelBuilderRegistry registry) { this.registry = registry; } @Override public void apply(Project project) { verifyGradleVersion(); registerModel(); final QuarkusPluginExtension quarkusExt = 
project.getExtensions().create(EXTENSION_NAME, QuarkusPluginExtension.class, project); registerTasks(project, quarkusExt); } private void registerConditionalDependencies(Project project) { ConditionalDependenciesEnabler conditionalDependenciesEnabler = new ConditionalDependenciesEnabler(project); ApplicationDeploymentClasspathBuilder deploymentClasspathBuilder = new ApplicationDeploymentClasspathBuilder( project); project.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME).getIncoming() .beforeResolve((dependencies) -> { Set<ExtensionDependency> implementationExtensions = conditionalDependenciesEnabler .declareConditionalDependencies(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); deploymentClasspathBuilder.createBuildClasspath(implementationExtensions, JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME); }); project.getConfigurations().getByName(DEV_MODE_CONFIGURATION_NAME).getIncoming().beforeResolve((devDependencies) -> { Set<ExtensionDependency> devModeExtensions = conditionalDependenciesEnabler .declareConditionalDependencies(DEV_MODE_CONFIGURATION_NAME); deploymentClasspathBuilder.createBuildClasspath(devModeExtensions, DEV_MODE_CONFIGURATION_NAME); }); project.getConfigurations().getByName(JavaPlugin.TEST_COMPILE_CLASSPATH_CONFIGURATION_NAME).getIncoming() .beforeResolve((testDependencies) -> { Set<ExtensionDependency> testExtensions = conditionalDependenciesEnabler .declareConditionalDependencies(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME); deploymentClasspathBuilder.createBuildClasspath(testExtensions, JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME); }); } private Set<Path> getSourcesParents(SourceSet mainSourceSet) { Set<File> srcDirs = mainSourceSet.getJava().getSrcDirs(); return srcDirs.stream() .map(File::toPath) .map(Path::getParent) .collect(Collectors.toSet()); } private void registerModel() { registry.register(new GradleApplicationModelBuilder()); } private void verifyGradleVersion() { if 
(GradleVersion.current().compareTo(GradleVersion.version("6.1")) < 0) { throw new GradleException("Quarkus plugin requires Gradle 6.1 or later. Current version is: " + GradleVersion.current()); } } private void configureBuildNativeTask(Project project) { project.getGradle().getTaskGraph().whenReady(taskGraph -> { if (taskGraph.hasTask(project.getPath() + BUILD_NATIVE_TASK_NAME) || taskGraph.hasTask(project.getPath() + TEST_NATIVE_TASK_NAME)) { project.getExtensions().getExtraProperties() .set(QUARKUS_PACKAGE_TYPE, "native"); } }); } private void afterEvaluate(Project project) { registerConditionalDependencies(project); final HashSet<String> visited = new HashSet<>(); ConfigurationContainer configurations = project.getConfigurations(); configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME) .getIncoming().getDependencies() .forEach(d -> { if (d instanceof ProjectDependency) { visitProjectDep(project, ((ProjectDependency) d).getDependencyProject(), visited); } }); SourceSetExtension sourceSetExtension = project.getExtensions().getByType(QuarkusPluginExtension.class) .sourceSetExtension(); if (sourceSetExtension.extraNativeTest() != null) { SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class) .getSourceSets(); SourceSet nativeTestSourceSets = sourceSets.getByName(NATIVE_TEST_SOURCE_SET_NAME); nativeTestSourceSets.setCompileClasspath( nativeTestSourceSets.getCompileClasspath().plus(sourceSetExtension.extraNativeTest().getOutput())); nativeTestSourceSets.setRuntimeClasspath( nativeTestSourceSets.getRuntimeClasspath().plus(sourceSetExtension.extraNativeTest().getOutput())); configurations.findByName(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME).extendsFrom( configurations.findByName(sourceSetExtension.extraNativeTest().getImplementationConfigurationName())); configurations.findByName(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME).extendsFrom( 
configurations.findByName(sourceSetExtension.extraNativeTest().getRuntimeOnlyConfigurationName())); QuarkusTestNative nativeTest = (QuarkusTestNative) project.getTasks().getByName(TEST_NATIVE_TASK_NAME); nativeTest.setTestClassesDirs(nativeTestSourceSets.getOutput().getClassesDirs()); nativeTest.setClasspath(nativeTestSourceSets.getRuntimeClasspath()); } } private void visitProjectDep(Project project, Project dep, Set<String> visited) { if (dep.getState().getExecuted()) { setupQuarkusBuildTaskDeps(project, dep, visited); } else { dep.afterEvaluate(p -> { setupQuarkusBuildTaskDeps(project, p, visited); }); } } private void setupQuarkusBuildTaskDeps(Project project, Project dep, Set<String> visited) { if (!visited.add(dep.getPath())) { return; } project.getLogger().debug("Configuring {} task dependencies on {} tasks", project, dep); final Task quarkusBuild = project.getTasks().findByName(QUARKUS_BUILD_TASK_NAME); if (quarkusBuild != null) { final Task jarTask = dep.getTasks().findByName(JavaPlugin.JAR_TASK_NAME); if (jarTask != null) { final Task quarkusPrepare = project.getTasks().findByName(QUARKUS_GENERATE_CODE_TASK_NAME); final Task quarkusPrepareDev = project.getTasks().findByName(QUARKUS_GENERATE_CODE_DEV_TASK_NAME); final Task quarkusPrepareTests = project.getTasks().findByName(QUARKUS_GENERATE_CODE_TESTS_TASK_NAME); quarkusBuild.dependsOn(jarTask); if (quarkusPrepare != null) { quarkusPrepare.dependsOn(jarTask); } if (quarkusPrepareDev != null) { quarkusPrepareDev.dependsOn(jarTask); } if (quarkusPrepareTests != null) { quarkusPrepareTests.dependsOn(jarTask); } } } final Task quarkusDev = project.getTasks().findByName(QUARKUS_DEV_TASK_NAME); if (quarkusDev != null) { final Task resourcesTask = dep.getTasks().findByName(JavaPlugin.PROCESS_RESOURCES_TASK_NAME); if (resourcesTask != null) { quarkusDev.dependsOn(resourcesTask); } final Task resourcesTaskJandex = dep.getTasks().findByName("jandex"); if (resourcesTaskJandex != null) { 
quarkusDev.dependsOn(resourcesTaskJandex); } } final Configuration compileConfig = dep.getConfigurations().findByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME); if (compileConfig != null) { compileConfig.getIncoming().getDependencies() .forEach(d -> { if (d instanceof ProjectDependency) { visitProjectDep(project, ((ProjectDependency) d).getDependencyProject(), visited); } }); } } }
As of now, no. We may eventually need to support both import and provided but currently we have no use case for them.
static List<BomDependency> parsePomFileContent(Reader responseStream) { List<BomDependency> bomDependencies = new ArrayList<>(); ObjectMapper mapper = new XmlMapper(); try { HashMap<String, Object> value = mapper.readValue(responseStream, HashMap.class); Object packagingProp = value.getOrDefault("packaging", null); if(packagingProp != null && packagingProp.toString().equalsIgnoreCase("pom")) { return parsePomFileContent(responseStream); } HashMap<String, Object> dependenciesTag = (HashMap<String, Object>)value.getOrDefault("dependencies", null); if(dependenciesTag == null) { return null; } ArrayList<HashMap<String, Object>> dependencies = (ArrayList<HashMap<String, Object>>) dependenciesTag.getOrDefault("dependency", null); for(HashMap<String, Object> dependency: dependencies) { String groupId = (String) dependency.getOrDefault("groupId", null); String artifactId = (String) dependency.getOrDefault("artifactId", null); String version = (String) dependency.getOrDefault("version", null); String scope = (String) dependency.getOrDefault("scope", ScopeType.COMPILE.toString()); ScopeType scopeType = ScopeType.COMPILE; switch(scope) { case "test" : scopeType = ScopeType.TEST; break; default: scopeType = ScopeType.COMPILE; } bomDependencies.add(new BomDependency(groupId, artifactId, version, scopeType)); } } catch (IOException exception) { exception.printStackTrace(); } return bomDependencies.stream().distinct().collect(Collectors.toList()); }
default: scopeType = ScopeType.COMPILE;
static List<BomDependency> parsePomFileContent(Reader responseStream) { List<BomDependency> bomDependencies = new ArrayList<>(); ObjectMapper mapper = new XmlMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); try { Model value = mapper.readValue(responseStream, Model.class); List<Dependency> dependencies = value.getDependencies(); if(dependencies == null) { return bomDependencies; } for(Dependency dependency : dependencies) { ScopeType scopeType = ScopeType.COMPILE; if("test".equals(dependency.getScope())) { scopeType = ScopeType.TEST; } bomDependencies.add(new BomDependency( dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion(), scopeType)); } } catch (IOException exception) { exception.printStackTrace(); } return bomDependencies.stream().distinct().collect(Collectors.toList()); }
class Utils { public static final String COMMANDLINE_INPUTDIRECTORY = "inputdir"; public static final String COMMANDLINE_OUTPUTDIRECTORY = "outputdir"; public static final String EMPTY_STRING = ""; public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_OVERRIDDEN_INPUTDEPENDENCIES_FILE = "inputdependenciesfile"; public static final String COMMANDLINE_REPORTFILE = "reportfile"; public static final String COMMANDLINE_MODE = "mode"; public static final String ANALYZE_MODE = "analyze"; public static final String GENERATE_MODE = "generate"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final Pattern STRING_SPLIT_BY_COLON = Pattern.compile("[:]"); public static final Pattern INPUT_DEPENDENCY_PATTERN = Pattern.compile("(.+);(.*)"); public static final String PROJECT_VERSION = "project.version"; public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final HashSet<String> IGNORE_CONFLICT_LIST = new HashSet<>(/*Arrays.asList( "slf4j-api" )*/); public static final String POM_TYPE = "pom"; private static Logger logger = 
LoggerFactory.getLogger(Utils.class); static void validateNotNullOrEmpty(String argValue, String argName) { if(argValue == null || argValue.isEmpty()) { throw new NullPointerException(String.format("%s can't be null", argName)); } } static void validateNotNullOrEmpty(String[] argValue, String argName) { if(Arrays.stream(argValue).anyMatch(value -> value == null || value.isEmpty())) { throw new IllegalArgumentException(String.format("%s can't be null", argName)); } } static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } static boolean isPublishedArtifact(BomDependency dependency) { try { return getResolvedArtifact(dependency) != null; } catch (Exception ex) { logger.error(ex.toString()); } return false; } static MavenResolvedArtifact getResolvedArtifact(MavenDependency dependency) { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return mavenResolvedArtifact; } static void validateNull(String argValue, String argName) { if(argValue != null) { throw new IllegalArgumentException(String.format("%s should be null", argName)); } } static void validateValues(String argName, String argValue, String ... 
expectedValues) { if(Arrays.stream(expectedValues).noneMatch(a -> a.equals(argValue))) { throw new IllegalArgumentException(String.format("%s must match %s", argName, Arrays.toString(expectedValues))); } } static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { try (InputStreamReader reader = new InputStreamReader(response.body())) { return Utils.parsePomFileContent(reader); } catch (IOException ex) { logger.error("Failed to read contents for {}", dependency.toString()); } } return null; }).join(); } static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } static List<BomDependency> parsePomFileContent(String fileName) { try (FileReader reader = new FileReader(fileName)) { return parsePomFileContent(reader); } catch (IOException exception) { logger.error("Failed to read the contents of the pom file: {}", fileName); } return new ArrayList<>(); } static List<BomDependency> 
parseBomFileContent(Reader responseStream) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); if(version.equals(PROJECT_VERSION)) { version = model.getVersion(); } } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
class Utils { public static final String COMMANDLINE_INPUTDIRECTORY = "inputdir"; public static final String COMMANDLINE_OUTPUTDIRECTORY = "outputdir"; public static final String COMMANDLINE_MODE = "mode"; public static final String ANALYZE_MODE = "analyze"; public static final String GENERATE_MODE = "generate"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final Pattern STRING_SPLIT_BY_COLON = Pattern.compile("[:]"); public static final Pattern INPUT_DEPENDENCY_PATTERN = Pattern.compile("(.+);(.*)"); public static final String PROJECT_VERSION = "project.version"; public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final HashSet<String> IGNORE_CONFLICT_LIST = new HashSet<>(/*Arrays.asList( "slf4j-api" )*/); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); static void validateNotNullOrEmpty(String argValue, String argName) { if(argValue == null || argValue.isEmpty()) { throw new NullPointerException(String.format("%s can't be null", argName)); } } static void validateNotNullOrEmpty(String[] argValue, String argName) { if(Arrays.stream(argValue).anyMatch(value -> value == null || value.isEmpty())) { throw new 
IllegalArgumentException(String.format("%s can't be null", argName)); } } static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } static boolean isPublishedArtifact(BomDependency dependency) { try { return getResolvedArtifact(dependency) != null; } catch (Exception ex) { logger.error(ex.toString()); } return false; } static MavenResolvedArtifact getResolvedArtifact(MavenDependency dependency) { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return mavenResolvedArtifact; } static void validateNull(String argValue, String argName) { if(argValue != null) { throw new IllegalArgumentException(String.format("%s should be null", argName)); } } static void validateValues(String argName, String argValue, String ... expectedValues) { if(Arrays.stream(expectedValues).noneMatch(a -> a.equals(argValue))) { throw new IllegalArgumentException(String.format("%s must match %s", argName, Arrays.toString(expectedValues))); } } static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() 
.uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { try (InputStreamReader reader = new InputStreamReader(response.body())) { return Utils.parsePomFileContent(reader); } catch (IOException ex) { logger.error("Failed to read contents for {}", dependency.toString()); } } return null; }).join(); } static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } static List<BomDependency> parsePomFileContent(String fileName) { try (FileReader reader = new FileReader(fileName)) { return parsePomFileContent(reader); } catch (IOException exception) { logger.error("Failed to read the contents of the pom file: {}", fileName); } return new ArrayList<>(); } static List<BomDependency> parseBomFileContent(Reader responseStream) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); if(version.equals(PROJECT_VERSION)) { version = model.getVersion(); } } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, 
propertyValue.length() - 1); } return propertyValue; } }
With 864788ea19e77a6292976f2b999a8c933034371d we simply rewrite the build file
public void save() { Path buildFilePath = this.sourceRoot.resolve(TARGET_DIR_NAME).resolve(BUILD_FILE); boolean shouldUpdate = this.currentPackage().getResolution().autoUpdate(); if (!buildFilePath.toFile().exists()) { createBuildFile(buildFilePath); writeBuildFile(buildFilePath); writeDependencies(); } else { try { BuildJson buildJson = readBuildJson(buildFilePath); writeDependencies(); if (buildJson != null) { if (shouldUpdate) { writeBuildFile(buildFilePath); } else { buildJson.setLastBuildTime(System.currentTimeMillis()); ProjectUtils.writeBuildFile(buildFilePath, buildJson); } } else { writeBuildFile(buildFilePath); } } catch (JsonSyntaxException e) { throw new ProjectException("Invalid '" + BUILD_FILE + "' file format"); } catch (IOException e) { throw new ProjectException("Failed to read the '" + BUILD_FILE + "' file"); } } }
writeDependencies();
public void save() { Path buildFilePath = this.sourceRoot.resolve(TARGET_DIR_NAME).resolve(BUILD_FILE); boolean shouldUpdate = this.currentPackage().getResolution().autoUpdate(); if (!buildFilePath.toFile().exists()) { createBuildFile(buildFilePath); writeBuildFile(buildFilePath); writeDependencies(); } else { BuildJson buildJson = null; try { buildJson = readBuildJson(buildFilePath); } catch (JsonSyntaxException | IOException e) { } writeDependencies(); if (buildJson != null && !shouldUpdate) { buildJson.setLastBuildTime(System.currentTimeMillis()); writeBuildFile(buildFilePath, buildJson); } else { writeBuildFile(buildFilePath); } } }
/**
 * A build project: a Ballerina package loaded from its source directory.
 * NOTE(review): Javadoc/comments added only; code is unchanged.
 */
class BuildProject extends Project {

    /**
     * Loads a BuildProject from the provided path.
     *
     * @param projectPath Ballerina project path
     * @return build project
     */
    public static BuildProject load(ProjectEnvironmentBuilder environmentBuilder, Path projectPath) {
        return load(environmentBuilder, projectPath, new BuildOptionsBuilder().build());
    }

    /**
     * Loads a BuildProject from the provided path.
     *
     * @param projectPath Ballerina project path
     * @return BuildProject instance
     */
    public static BuildProject load(Path projectPath) {
        return load(projectPath, new BuildOptionsBuilder().build());
    }

    /**
     * Loads a BuildProject from provided path and build options.
     *
     * @param projectPath Ballerina project path
     * @param buildOptions build options
     * @return BuildProject instance
     */
    public static BuildProject load(Path projectPath, BuildOptions buildOptions) {
        ProjectEnvironmentBuilder environmentBuilder = ProjectEnvironmentBuilder.getDefaultBuilder();
        return load(environmentBuilder, projectPath, buildOptions);
    }

    /**
     * Loads a BuildProject from provided environment builder, path, build options.
     *
     * @param environmentBuilder custom environment builder
     * @param projectPath Ballerina project path
     * @param buildOptions build options
     * @return BuildProject instance
     */
    public static BuildProject load(ProjectEnvironmentBuilder environmentBuilder, Path projectPath,
                                    BuildOptions buildOptions) {
        PackageConfig packageConfig = PackageConfigCreator.createBuildProjectConfig(projectPath);
        // Merge caller-supplied options with those declared by the package itself.
        BuildOptions mergedBuildOptions = ProjectFiles.createBuildOptions(packageConfig, buildOptions, projectPath);
        BuildProject buildProject = new BuildProject(environmentBuilder, projectPath, mergedBuildOptions);
        buildProject.addPackage(packageConfig);
        return buildProject;
    }

    private BuildProject(ProjectEnvironmentBuilder environmentBuilder, Path projectPath, BuildOptions buildOptions) {
        super(ProjectKind.BUILD_PROJECT, projectPath, environmentBuilder, buildOptions);
        populateCompilerContext();
    }

    // On-disk directory of the given module: the project root for the default
    // module, otherwise <root>/modules/<moduleNamePart>.
    private Optional<Path> modulePath(ModuleId moduleId) {
        if (currentPackage().moduleIds().contains(moduleId)) {
            if (currentPackage().getDefaultModule().moduleId() == moduleId) {
                return Optional.of(sourceRoot);
            } else {
                return Optional.of(sourceRoot.resolve(ProjectConstants.MODULES_ROOT).resolve(
                        currentPackage().module(moduleId).moduleName().moduleNamePart()));
            }
        }
        return Optional.empty();
    }

    @Override
    public Optional<Path> documentPath(DocumentId documentId) {
        // Scan every module for the document; test documents live under the
        // module's tests/ subdirectory.
        for (ModuleId moduleId : currentPackage().moduleIds()) {
            Module module = currentPackage().module(moduleId);
            Optional<Path> modulePath = modulePath(moduleId);
            if (module.documentIds().contains(documentId)) {
                if (modulePath.isPresent()) {
                    return Optional.of(modulePath.get().resolve(module.document(documentId).name()));
                }
            } else if (module.testDocumentIds().contains(documentId)) {
                if (modulePath.isPresent()) {
                    return Optional.of(modulePath.get()
                            .resolve(ProjectConstants.TEST_DIR_NAME).resolve(
                                    module.document(documentId).name().split(ProjectConstants.TEST_DIR_NAME + "/")[1]));
                }
            }
        }
        return Optional.empty();
    }

    @Override
    public DocumentId documentId(Path file) {
        if (isFilePathInProject(file)) {
            Path parent = Optional.of(file.toAbsolutePath().getParent()).get();
            for (ModuleId moduleId : this.currentPackage().moduleIds()) {
                String moduleDirName;
                if (!this.currentPackage().getDefaultModule().moduleId().equals(moduleId)) {
                    // Non-default modules are named "<pkg>.<dir>"; strip the package prefix.
                    moduleDirName = moduleId.moduleName()
                            .split(this.currentPackage().packageName().toString() + "\\.")[1];
                } else {
                    moduleDirName = Optional.of(this.sourceRoot.getFileName()).get().toString();
                }
                // Match when the file sits in the module directory or one level below it (tests/).
                if (Optional.of(parent.getFileName()).get().toString().equals(moduleDirName) || Optional.of(
                        Optional.of(parent.getParent()).get().getFileName()).get().toString().equals(moduleDirName)) {
                    Module module = this.currentPackage().module(moduleId);
                    for (DocumentId documentId : module.documentIds()) {
                        if (module.document(documentId).name().equals(
                                Optional.of(file.getFileName()).get().toString())) {
                            return documentId;
                        }
                    }
                    for (DocumentId documentId : module.testDocumentIds()) {
                        if (module.document(documentId).name().split(ProjectConstants.TEST_DIR_NAME + "/")[1]
                                .equals(Optional.of(file.getFileName()).get().toString())) {
                            return documentId;
                        }
                    }
                }
            }
        }
        throw new ProjectException("provided path does not belong to the project");
    }

    // True when the path resolves to some Ballerina package root without error.
    private boolean isFilePathInProject(Path filepath) {
        try {
            ProjectPaths.packageRoot(filepath);
        } catch (ProjectException e) {
            return false;
        }
        return true;
    }

    // Writes the resolved dependency list to Dependencies.toml, sorted by org then name.
    private void writeDependencies() {
        Package currentPackage = this.currentPackage();
        if (currentPackage != null) {
            Comparator<Dependency> comparator = (o1, o2) -> {
                if (o1.getOrg().equals(o2.getOrg())) {
                    return o1.getName().compareTo(o2.getName());
                }
                return o1.getOrg().compareTo(o2.getOrg());
            };
            List<Dependency> pkgDependencies = getPackageDependencies();
            pkgDependencies.sort(comparator);
            Path dependenciesTomlFile = currentPackage.project().sourceRoot().resolve(DEPENDENCIES_TOML);
            String dependenciesContent = getDependenciesTomlContent(pkgDependencies);
            if (!pkgDependencies.isEmpty()) {
                createIfNotExists(dependenciesTomlFile);
                writeContent(dependenciesTomlFile, dependenciesContent);
            } else {
                // No dependencies: only rewrite an existing file, never create an empty one.
                if (dependenciesTomlFile.toFile().exists()) {
                    writeContent(dependenciesTomlFile, dependenciesContent);
                }
            }
        }
    }

    // Builds the flat Dependency list (root + direct + transitive) from the resolved graph.
    private List<Dependency> getPackageDependencies() {
        PackageResolution packageResolution = this.currentPackage().getResolution();
        ResolvedPackageDependency rootPkgNode = new ResolvedPackageDependency(this.currentPackage(),
                PackageDependencyScope.DEFAULT);
        DependencyGraph<ResolvedPackageDependency> dependencyGraph = packageResolution.dependencyGraph();
        Collection<ResolvedPackageDependency> directDependencies = dependencyGraph.getDirectDependencies(rootPkgNode);
        List<Dependency> dependencies = new ArrayList<>();
        // Root package entry, listing all of its modules.
        Package rootPackage = rootPkgNode.packageInstance();
        Dependency rootPkgDependency = new Dependency(rootPackage.packageOrg().value(),
                rootPackage.packageName().value(), rootPackage.packageVersion().value().toString());
        List<Dependency.Module> rootPkgModules = new ArrayList<>();
        for (ModuleId moduleId : rootPackage.moduleIds()) {
            Module module = rootPackage.module(moduleId);
            Dependency.Module depsModule = new Dependency.Module(module.descriptor().org().value(),
                    module.descriptor().packageName().value(), module.descriptor().name().toString());
            rootPkgModules.add(depsModule);
        }
        rootPkgModules.sort(Comparator.comparing(Dependency.Module::moduleName));
        rootPkgDependency.setModules(rootPkgModules);
        rootPkgDependency.setDependencies(getTransitiveDependencies(dependencyGraph, rootPkgNode));
        rootPkgDependency.setTransitive(false);
        rootPkgDependency.setScope(rootPkgNode.scope());
        dependencies.add(rootPkgDependency);
        // Direct dependencies, with module lists read from their bala layouts.
        for (ResolvedPackageDependency directDependency : directDependencies) {
            Package aPackage = directDependency.packageInstance();
            Dependency dependency = new Dependency(aPackage.packageOrg().toString(),
                    aPackage.packageName().value(), aPackage.packageVersion().toString());
            BalaFiles.DependencyGraphResult packageDependencyGraph = BalaFiles
                    .createPackageDependencyGraph(directDependency.packageInstance().project().sourceRoot());
            Set<ModuleDescriptor> moduleDescriptors = packageDependencyGraph.moduleDependencies().keySet();
            List<Dependency.Module> modules = new ArrayList<>();
            for (ModuleDescriptor moduleDescriptor : moduleDescriptors) {
                Dependency.Module module = new Dependency.Module(moduleDescriptor.org().value(),
                        moduleDescriptor.packageName().value(), moduleDescriptor.name().toString());
                modules.add(module);
            }
            modules.sort(Comparator.comparing(Dependency.Module::moduleName));
            dependency.setModules(modules);
            dependency.setDependencies(getTransitiveDependencies(dependencyGraph, directDependency));
            dependency.setScope(directDependency.scope());
            dependency.setTransitive(false);
            dependencies.add(dependency);
        }
        // Remaining graph nodes (excluding root and direct deps) are transitive.
        Collection<ResolvedPackageDependency> allDependencies = dependencyGraph.getNodes();
        for (ResolvedPackageDependency transDependency : allDependencies) {
            if (directDependencies.contains(transDependency)) {
                continue;
            }
            if (transDependency.packageInstance() != this.currentPackage()) {
                Package aPackage = transDependency.packageInstance();
                Dependency dependency = new Dependency(aPackage.packageOrg().toString(),
                        aPackage.packageName().value(), aPackage.packageVersion().toString());
                dependency.setDependencies(getTransitiveDependencies(dependencyGraph, transDependency));
                dependency.setScope(transDependency.scope());
                dependency.setTransitive(true);
                dependencies.add(dependency);
            }
        }
        return dependencies;
    }

    // Direct dependencies of the given node, converted to sorted Dependency values.
    private List<Dependency> getTransitiveDependencies(DependencyGraph<ResolvedPackageDependency> dependencyGraph,
                                                       ResolvedPackageDependency directDependency) {
        List<Dependency> dependencyList = new ArrayList<>();
        Collection<ResolvedPackageDependency> pkgDependencies = dependencyGraph
                .getDirectDependencies(directDependency);
        for (ResolvedPackageDependency resolvedTransitiveDep : pkgDependencies) {
            Package dependencyPkgContext = resolvedTransitiveDep.packageInstance();
            Dependency dep = new Dependency(dependencyPkgContext.packageOrg().toString(),
                    dependencyPkgContext.packageName().value(), dependencyPkgContext.packageVersion().toString());
            dependencyList.add(dep);
        }
        Comparator<Dependency> comparator = (o1, o2) -> {
            if (o1.getOrg().equals(o2.getOrg())) {
                return o1.getName().compareTo(o2.getName());
            }
            return o1.getOrg().compareTo(o2.getOrg());
        };
        dependencyList.sort(comparator);
        return dependencyList;
    }

    // Creates the Dependencies.toml file if absent; wraps IO failure in ProjectException.
    private static void createIfNotExists(Path filePath) {
        if (!filePath.toFile().exists()) {
            try {
                Files.createFile(filePath);
            } catch (IOException e) {
                throw new ProjectException("Failed to create 'Dependencies.toml' file to write dependencies");
            }
        }
    }

    // Overwrites the file with the given content; wraps IO failure in ProjectException.
    private static void writeContent(Path filePath, String content) {
        try {
            Files.write(filePath, Collections.singleton(content));
        } catch (IOException e) {
            throw new ProjectException("Failed to write dependencies to the 'Dependencies.toml' file");
        }
    }

    // Creates the target directory (if needed) and an empty build file.
    private static void createBuildFile(Path buildFilePath) {
        try {
            if (!buildFilePath.getParent().toFile().exists()) {
                Files.createDirectory(buildFilePath.getParent());
            }
            Files.createFile(buildFilePath);
        } catch (IOException e) {
            throw new ProjectException("Failed to create '" + BUILD_FILE + "' file");
        }
    }

    // Writes a fresh build file with both timestamps set to "now".
    private static void writeBuildFile(Path buildFilePath) {
        BuildJson buildJson = new BuildJson(System.currentTimeMillis(), System.currentTimeMillis());
        ProjectUtils.writeBuildFile(buildFilePath, buildJson);
    }
}
/**
 * A build project: a Ballerina package loaded from its source directory.
 * NOTE(review): Javadoc/comments added only; code is unchanged.
 */
class BuildProject extends Project {

    /**
     * Loads a BuildProject from the provided path.
     *
     * @param projectPath Ballerina project path
     * @return build project
     */
    public static BuildProject load(ProjectEnvironmentBuilder environmentBuilder, Path projectPath) {
        return load(environmentBuilder, projectPath, new BuildOptionsBuilder().build());
    }

    /**
     * Loads a BuildProject from the provided path.
     *
     * @param projectPath Ballerina project path
     * @return BuildProject instance
     */
    public static BuildProject load(Path projectPath) {
        return load(projectPath, new BuildOptionsBuilder().build());
    }

    /**
     * Loads a BuildProject from provided path and build options.
     *
     * @param projectPath Ballerina project path
     * @param buildOptions build options
     * @return BuildProject instance
     */
    public static BuildProject load(Path projectPath, BuildOptions buildOptions) {
        ProjectEnvironmentBuilder environmentBuilder = ProjectEnvironmentBuilder.getDefaultBuilder();
        return load(environmentBuilder, projectPath, buildOptions);
    }

    /**
     * Loads a BuildProject from provided environment builder, path, build options.
     *
     * @param environmentBuilder custom environment builder
     * @param projectPath Ballerina project path
     * @param buildOptions build options
     * @return BuildProject instance
     */
    public static BuildProject load(ProjectEnvironmentBuilder environmentBuilder, Path projectPath,
                                    BuildOptions buildOptions) {
        PackageConfig packageConfig = PackageConfigCreator.createBuildProjectConfig(projectPath);
        // Merge caller-supplied options with those declared by the package itself.
        BuildOptions mergedBuildOptions = ProjectFiles.createBuildOptions(packageConfig, buildOptions, projectPath);
        BuildProject buildProject = new BuildProject(environmentBuilder, projectPath, mergedBuildOptions);
        buildProject.addPackage(packageConfig);
        return buildProject;
    }

    private BuildProject(ProjectEnvironmentBuilder environmentBuilder, Path projectPath, BuildOptions buildOptions) {
        super(ProjectKind.BUILD_PROJECT, projectPath, environmentBuilder, buildOptions);
        populateCompilerContext();
    }

    // On-disk directory of the given module: the project root for the default
    // module, otherwise <root>/modules/<moduleNamePart>.
    private Optional<Path> modulePath(ModuleId moduleId) {
        if (currentPackage().moduleIds().contains(moduleId)) {
            if (currentPackage().getDefaultModule().moduleId() == moduleId) {
                return Optional.of(sourceRoot);
            } else {
                return Optional.of(sourceRoot.resolve(ProjectConstants.MODULES_ROOT).resolve(
                        currentPackage().module(moduleId).moduleName().moduleNamePart()));
            }
        }
        return Optional.empty();
    }

    @Override
    public Optional<Path> documentPath(DocumentId documentId) {
        // Scan every module for the document; test documents live under the
        // module's tests/ subdirectory.
        for (ModuleId moduleId : currentPackage().moduleIds()) {
            Module module = currentPackage().module(moduleId);
            Optional<Path> modulePath = modulePath(moduleId);
            if (module.documentIds().contains(documentId)) {
                if (modulePath.isPresent()) {
                    return Optional.of(modulePath.get().resolve(module.document(documentId).name()));
                }
            } else if (module.testDocumentIds().contains(documentId)) {
                if (modulePath.isPresent()) {
                    return Optional.of(modulePath.get()
                            .resolve(ProjectConstants.TEST_DIR_NAME).resolve(
                                    module.document(documentId).name().split(ProjectConstants.TEST_DIR_NAME + "/")[1]));
                }
            }
        }
        return Optional.empty();
    }

    @Override
    public DocumentId documentId(Path file) {
        if (isFilePathInProject(file)) {
            Path parent = Optional.of(file.toAbsolutePath().getParent()).get();
            for (ModuleId moduleId : this.currentPackage().moduleIds()) {
                String moduleDirName;
                if (!this.currentPackage().getDefaultModule().moduleId().equals(moduleId)) {
                    // Non-default modules are named "<pkg>.<dir>"; strip the package prefix.
                    moduleDirName = moduleId.moduleName()
                            .split(this.currentPackage().packageName().toString() + "\\.")[1];
                } else {
                    moduleDirName = Optional.of(this.sourceRoot.getFileName()).get().toString();
                }
                // Match when the file sits in the module directory or one level below it (tests/).
                if (Optional.of(parent.getFileName()).get().toString().equals(moduleDirName) || Optional.of(
                        Optional.of(parent.getParent()).get().getFileName()).get().toString().equals(moduleDirName)) {
                    Module module = this.currentPackage().module(moduleId);
                    for (DocumentId documentId : module.documentIds()) {
                        if (module.document(documentId).name().equals(
                                Optional.of(file.getFileName()).get().toString())) {
                            return documentId;
                        }
                    }
                    for (DocumentId documentId : module.testDocumentIds()) {
                        if (module.document(documentId).name().split(ProjectConstants.TEST_DIR_NAME + "/")[1]
                                .equals(Optional.of(file.getFileName()).get().toString())) {
                            return documentId;
                        }
                    }
                }
            }
        }
        throw new ProjectException("provided path does not belong to the project");
    }

    // True when the path resolves to some Ballerina package root without error.
    private boolean isFilePathInProject(Path filepath) {
        try {
            ProjectPaths.packageRoot(filepath);
        } catch (ProjectException e) {
            return false;
        }
        return true;
    }

    // Writes the resolved dependency list to Dependencies.toml, sorted by org then name.
    private void writeDependencies() {
        Package currentPackage = this.currentPackage();
        if (currentPackage != null) {
            Comparator<Dependency> comparator = (o1, o2) -> {
                if (o1.getOrg().equals(o2.getOrg())) {
                    return o1.getName().compareTo(o2.getName());
                }
                return o1.getOrg().compareTo(o2.getOrg());
            };
            List<Dependency> pkgDependencies = getPackageDependencies();
            pkgDependencies.sort(comparator);
            Path dependenciesTomlFile = currentPackage.project().sourceRoot().resolve(DEPENDENCIES_TOML);
            String dependenciesContent = getDependenciesTomlContent(pkgDependencies);
            if (!pkgDependencies.isEmpty()) {
                createIfNotExists(dependenciesTomlFile);
                writeContent(dependenciesTomlFile, dependenciesContent);
            } else {
                // No dependencies: only rewrite an existing file, never create an empty one.
                if (dependenciesTomlFile.toFile().exists()) {
                    writeContent(dependenciesTomlFile, dependenciesContent);
                }
            }
        }
    }

    // Builds the flat Dependency list (root + direct + transitive) from the resolved graph.
    private List<Dependency> getPackageDependencies() {
        PackageResolution packageResolution = this.currentPackage().getResolution();
        ResolvedPackageDependency rootPkgNode = new ResolvedPackageDependency(this.currentPackage(),
                PackageDependencyScope.DEFAULT);
        DependencyGraph<ResolvedPackageDependency> dependencyGraph = packageResolution.dependencyGraph();
        Collection<ResolvedPackageDependency> directDependencies = dependencyGraph.getDirectDependencies(rootPkgNode);
        List<Dependency> dependencies = new ArrayList<>();
        // Root package entry, listing all of its modules.
        Package rootPackage = rootPkgNode.packageInstance();
        Dependency rootPkgDependency = new Dependency(rootPackage.packageOrg().value(),
                rootPackage.packageName().value(), rootPackage.packageVersion().value().toString());
        List<Dependency.Module> rootPkgModules = new ArrayList<>();
        for (ModuleId moduleId : rootPackage.moduleIds()) {
            Module module = rootPackage.module(moduleId);
            Dependency.Module depsModule = new Dependency.Module(module.descriptor().org().value(),
                    module.descriptor().packageName().value(), module.descriptor().name().toString());
            rootPkgModules.add(depsModule);
        }
        rootPkgModules.sort(Comparator.comparing(Dependency.Module::moduleName));
        rootPkgDependency.setModules(rootPkgModules);
        rootPkgDependency.setDependencies(getTransitiveDependencies(dependencyGraph, rootPkgNode));
        rootPkgDependency.setTransitive(false);
        rootPkgDependency.setScope(rootPkgNode.scope());
        dependencies.add(rootPkgDependency);
        // Direct dependencies, with module lists read from their bala layouts.
        for (ResolvedPackageDependency directDependency : directDependencies) {
            Package aPackage = directDependency.packageInstance();
            Dependency dependency = new Dependency(aPackage.packageOrg().toString(),
                    aPackage.packageName().value(), aPackage.packageVersion().toString());
            BalaFiles.DependencyGraphResult packageDependencyGraph = BalaFiles
                    .createPackageDependencyGraph(directDependency.packageInstance().project().sourceRoot());
            Set<ModuleDescriptor> moduleDescriptors = packageDependencyGraph.moduleDependencies().keySet();
            List<Dependency.Module> modules = new ArrayList<>();
            for (ModuleDescriptor moduleDescriptor : moduleDescriptors) {
                Dependency.Module module = new Dependency.Module(moduleDescriptor.org().value(),
                        moduleDescriptor.packageName().value(), moduleDescriptor.name().toString());
                modules.add(module);
            }
            modules.sort(Comparator.comparing(Dependency.Module::moduleName));
            dependency.setModules(modules);
            dependency.setDependencies(getTransitiveDependencies(dependencyGraph, directDependency));
            dependency.setScope(directDependency.scope());
            dependency.setTransitive(false);
            dependencies.add(dependency);
        }
        // Remaining graph nodes (excluding root and direct deps) are transitive.
        Collection<ResolvedPackageDependency> allDependencies = dependencyGraph.getNodes();
        for (ResolvedPackageDependency transDependency : allDependencies) {
            if (directDependencies.contains(transDependency)) {
                continue;
            }
            if (transDependency.packageInstance() != this.currentPackage()) {
                Package aPackage = transDependency.packageInstance();
                Dependency dependency = new Dependency(aPackage.packageOrg().toString(),
                        aPackage.packageName().value(), aPackage.packageVersion().toString());
                dependency.setDependencies(getTransitiveDependencies(dependencyGraph, transDependency));
                dependency.setScope(transDependency.scope());
                dependency.setTransitive(true);
                dependencies.add(dependency);
            }
        }
        return dependencies;
    }

    // Direct dependencies of the given node, converted to sorted Dependency values.
    private List<Dependency> getTransitiveDependencies(DependencyGraph<ResolvedPackageDependency> dependencyGraph,
                                                       ResolvedPackageDependency directDependency) {
        List<Dependency> dependencyList = new ArrayList<>();
        Collection<ResolvedPackageDependency> pkgDependencies = dependencyGraph
                .getDirectDependencies(directDependency);
        for (ResolvedPackageDependency resolvedTransitiveDep : pkgDependencies) {
            Package dependencyPkgContext = resolvedTransitiveDep.packageInstance();
            Dependency dep = new Dependency(dependencyPkgContext.packageOrg().toString(),
                    dependencyPkgContext.packageName().value(), dependencyPkgContext.packageVersion().toString());
            dependencyList.add(dep);
        }
        Comparator<Dependency> comparator = (o1, o2) -> {
            if (o1.getOrg().equals(o2.getOrg())) {
                return o1.getName().compareTo(o2.getName());
            }
            return o1.getOrg().compareTo(o2.getOrg());
        };
        dependencyList.sort(comparator);
        return dependencyList;
    }

    // Creates the Dependencies.toml file if absent; wraps IO failure in ProjectException.
    private static void createIfNotExists(Path filePath) {
        if (!filePath.toFile().exists()) {
            try {
                Files.createFile(filePath);
            } catch (IOException e) {
                throw new ProjectException("Failed to create 'Dependencies.toml' file to write dependencies");
            }
        }
    }

    // Overwrites the file with the given content; wraps IO failure in ProjectException.
    private static void writeContent(Path filePath, String content) {
        try {
            Files.write(filePath, Collections.singleton(content));
        } catch (IOException e) {
            throw new ProjectException("Failed to write dependencies to the 'Dependencies.toml' file");
        }
    }

    // Creates the target directory (if needed) and an empty build file.
    private static void createBuildFile(Path buildFilePath) {
        try {
            if (!buildFilePath.getParent().toFile().exists()) {
                Files.createDirectory(buildFilePath.getParent());
            }
            Files.createFile(buildFilePath);
        } catch (IOException e) {
            throw new ProjectException("Failed to create '" + BUILD_FILE + "' file");
        }
    }

    // Writes a fresh build file with both timestamps set to "now".
    private static void writeBuildFile(Path buildFilePath) {
        BuildJson buildJson = new BuildJson(System.currentTimeMillis(), System.currentTimeMillis());
        writeBuildFile(buildFilePath, buildJson);
    }

    // Serializes the given BuildJson (pretty-printed) to the build file.
    // Skips silently when the file is not writable.
    private static void writeBuildFile(Path buildFilePath, BuildJson buildJson) {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        if (!buildFilePath.toFile().canWrite()) {
            return;
        }
        try {
            Files.write(buildFilePath, Collections.singleton(gson.toJson(buildJson)));
        } catch (IOException e) {
            // NOTE(review): write failures are silently ignored — the build file is
            // best-effort metadata; consider logging the failure for diagnostics.
        }
    }
}
Could you modify the kernel logic instead of the SQL parsing logic?
private BinaryOperationExpression createPatternMatchingOperationSegment(final AExprContext ctx) { String operator = getOriginalText(ctx.patternMatchingOperator()).toUpperCase(); ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ListExpression right = new ListExpression(ctx.aExpr(1).start.getStartIndex(), ctx.aExpr().get(ctx.aExpr().size() - 1).stop.getStopIndex()); for (int i = 1; i < ctx.aExpr().size(); i++) { right.getItems().add((ExpressionSegment) visit(ctx.aExpr().get(i))); } String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); }
String operator = getOriginalText(ctx.patternMatchingOperator()).toUpperCase();
private BinaryOperationExpression createPatternMatchingOperationSegment(final AExprContext ctx) { String operator = getOriginalText(ctx.patternMatchingOperator()).toUpperCase(); ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ListExpression right = new ListExpression(ctx.aExpr(1).start.getStartIndex(), ctx.aExpr().get(ctx.aExpr().size() - 1).stop.getStopIndex()); for (int i = 1; i < ctx.aExpr().size(); i++) { right.getItems().add((ExpressionSegment) visit(ctx.aExpr().get(i))); } String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); }
class PostgreSQLStatementSQLVisitor extends PostgreSQLStatementParserBaseVisitor<ASTNode> { private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>(); public PostgreSQLStatementSQLVisitor(final Properties props) { } @Override public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) { if (null == ctx.DOLLAR_()) { return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION); } return new ParameterMarkerValue(new NumberLiteralValue(ctx.NUMBER_().getText()).getValue().intValue() - 1, ParameterMarkerType.DOLLAR); } @Override public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) { return new NumberLiteralValue(ctx.NUMBER_().getText()); } @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWordContext unreservedWord = ctx.unreservedWord(); return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText()); } @Override public final ASTNode visitUnreservedWord(final UnreservedWordContext ctx) { return new IdentifierValue(ctx.getText()); } @Override public final ASTNode visitSchemaName(final SchemaNameContext ctx) { return visit(ctx.identifier()); } @Override public final ASTNode visitTableName(final TableNameContext ctx) { SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(), ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name()))); OwnerContext owner = ctx.owner(); if (null != owner) { result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier()))); } return result; } @Override public final ASTNode visitColumnName(final ColumnNameContext ctx) { ColumnSegment result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())); OwnerContext owner = ctx.owner(); if (null != owner) { 
result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier()))); } return result; } @Override public final ASTNode visitIndexName(final IndexNameContext ctx) { IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier())); return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName); } @Override public final ASTNode visitConstraintName(final ConstraintNameContext ctx) { return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier())); } @Override public final ASTNode visitTableNames(final TableNamesContext ctx) { CollectionValue<SimpleTableSegment> result = new CollectionValue<>(); for (TableNameContext each : ctx.tableName()) { result.getValue().add((SimpleTableSegment) visit(each)); } return result; } @Override public final ASTNode visitColumnNames(final ColumnNamesContext ctx) { CollectionValue<ColumnSegment> result = new CollectionValue<>(); for (ColumnNameContext each : ctx.columnName()) { result.getValue().add((ColumnSegment) visit(each)); } return result; } @Override public ASTNode visitAExpr(final AExprContext ctx) { if (null != ctx.cExpr()) { return visit(ctx.cExpr()); } if (null != ctx.TYPE_CAST_()) { return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText()); } if (null != ctx.BETWEEN()) { return createBetweenSegment(ctx); } if (null != ctx.IN()) { return createInSegment(ctx); } if (null != ctx.patternMatchingOperator()) { return createPatternMatchingOperationSegment(ctx); } Optional<String> binaryOperator = findBinaryOperator(ctx); if (binaryOperator.isPresent()) { return createBinaryOperationSegment(ctx, binaryOperator.get()); } super.visitAExpr(ctx); String text = 
ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text); } private Optional<String> findBinaryOperator(final AExprContext ctx) { if (null != ctx.IS()) { return Optional.of(ctx.IS().getText()); } if (null != ctx.ISNULL()) { return Optional.of("IS"); } if (1 == ctx.aExpr().size()) { return Optional.empty(); } if (null != ctx.comparisonOperator()) { return Optional.of(ctx.comparisonOperator().getText()); } if (null != ctx.andOperator()) { return Optional.of(ctx.andOperator().getText()); } if (null != ctx.orOperator()) { return Optional.of(ctx.orOperator().getText()); } if (null != ctx.PLUS_()) { return Optional.of(ctx.PLUS_().getText()); } if (null != ctx.MINUS_()) { return Optional.of(ctx.MINUS_().getText()); } if (null != ctx.ASTERISK_()) { return Optional.of(ctx.ASTERISK_().getText()); } if (null != ctx.SLASH_()) { return Optional.of(ctx.SLASH_().getText()); } return Optional.empty(); } private BinaryOperationExpression createBinaryOperationSegment(final AExprContext ctx, final String operator) { if ("IS".equalsIgnoreCase(operator)) { ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); String rightText; ExpressionSegment right; if (null != ctx.IS()) { rightText = ctx.start.getInputStream().getText(new Interval(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex())).trim(); right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText); } else { rightText = ctx.start.getInputStream().getText(new Interval(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex())).trim(); right = new LiteralExpressionSegment(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex(), rightText); } return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, "IS", 
ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()))); } ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ExpressionSegment right = (ExpressionSegment) visit(ctx.aExpr(1)); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @Override public ASTNode visitCExpr(final CExprContext ctx) { if (null != ctx.columnref()) { return visit(ctx.columnref()); } if (null != ctx.parameterMarker()) { ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker()); ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), parameterMarker.getValue(), parameterMarker.getType()); parameterMarkerSegments.add(result); return result; } if (null != ctx.aexprConst()) { return visit(ctx.aexprConst()); } if (null != ctx.aExpr()) { return visit(ctx.aExpr()); } if (null != ctx.funcExpr()) { return visit(ctx.funcExpr()); } if (null != ctx.selectWithParens()) { return createSubqueryExpressionSegment(ctx); } if (null != ctx.caseExpr()) { return visit(ctx.caseExpr()); } super.visitCExpr(ctx); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), text); } private ExpressionSegment createSubqueryExpressionSegment(final CExprContext ctx) { SubquerySegment subquerySegment = new SubquerySegment(ctx.selectWithParens().getStart().getStartIndex(), ctx.selectWithParens().getStop().getStopIndex(), (PostgreSQLSelectStatement) visit(ctx.selectWithParens())); return null == ctx.EXISTS() ? 
new SubqueryExpressionSegment(subquerySegment) : new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment); } @Override public ASTNode visitCaseExpr(final CaseExprContext ctx) { Collection<ExpressionSegment> whenExprs = new LinkedList<>(); Collection<ExpressionSegment> thenExprs = new LinkedList<>(); for (WhenClauseContext each : ctx.whenClauseList().whenClause()) { whenExprs.add((ExpressionSegment) visit(each.aExpr(0))); thenExprs.add((ExpressionSegment) visit(each.aExpr(1))); } ExpressionSegment caseExpr = null == ctx.caseArg() ? null : (ExpressionSegment) visit(ctx.caseArg().aExpr()); ExpressionSegment elseExpr = null == ctx.caseDefault() ? null : (ExpressionSegment) visit(ctx.caseDefault().aExpr()); return new CaseWhenExpression(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), caseExpr, whenExprs, thenExprs, elseExpr); } @Override public ASTNode visitFuncExpr(final FuncExprContext ctx) { if (null != ctx.functionExprCommonSubexpr()) { return visit(ctx.functionExprCommonSubexpr()); } Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class)); String aggregationType = ctx.funcApplication().funcName().getText(); if (AggregationType.isAggregationType(aggregationType)) { return createAggregationSegment(ctx.funcApplication(), aggregationType, expressionSegments); } FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.funcApplication().funcName().getText(), getOriginalText(ctx)); result.getParameters().addAll(expressionSegments); return result; } @Override public ASTNode visitFunctionExprCommonSubexpr(final FunctionExprCommonSubexprContext ctx) { if (null != ctx.CAST()) { return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText()); } FunctionSegment result = new 
FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getChild(0).getText(), getOriginalText(ctx)); Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class)); result.getParameters().addAll(expressionSegments); return result; } private <T extends ParseTree> Collection<T> getTargetRuleContextFromParseTree(final ParseTree parseTree, final Class<? extends T> clazz) { Collection<T> result = new LinkedList<>(); for (int index = 0; index < parseTree.getChildCount(); index++) { ParseTree child = parseTree.getChild(index); if (clazz.isInstance(child)) { result.add(clazz.cast(child)); } else { result.addAll(getTargetRuleContextFromParseTree(child, clazz)); } } return result; } private Collection<ExpressionSegment> getExpressionSegments(final Collection<AExprContext> aExprContexts) { Collection<ExpressionSegment> result = new LinkedList<>(); for (AExprContext each : aExprContexts) { result.add((ExpressionSegment) visit(each)); } return result; } @Override public ASTNode visitAexprConst(final AexprConstContext ctx) { LiteralValue<?> value; if (null != ctx.numberConst()) { value = new NumberLiteralValue(ctx.numberConst().getText()); } else if (null != ctx.STRING_()) { value = new StringLiteralValue(ctx.STRING_().getText()); } else if (null != ctx.FALSE()) { value = new BooleanLiteralValue(ctx.FALSE().getText()); } else if (null != ctx.TRUE()) { value = new BooleanLiteralValue(ctx.TRUE().getText()); } else if (null != ctx.NULL()) { value = new NullLiteralValue(ctx.getText()); } else { value = new OtherLiteralValue(ctx.getText()); } if (null != ctx.constTypeName() || null != ctx.funcName() && null == ctx.LP_()) { LiteralExpressionSegment expression = new LiteralExpressionSegment(ctx.STRING_().getSymbol().getStartIndex(), ctx.STRING_().getSymbol().getStopIndex(), value.getValue().toString()); String dataType = null != ctx.constTypeName() ? 
ctx.constTypeName().getText() : ctx.funcName().getText(); return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), expression, dataType); } return SQLUtils.createLiteralExpression(value, ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitColumnref(final ColumnrefContext ctx) { if (null != ctx.indirection()) { AttrNameContext attrName = ctx.indirection().indirectionEl().attrName(); ColumnSegment result = new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(attrName.getText())); OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); result.setOwner(owner); return result; } return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); } private InExpression createInSegment(final AExprContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ExpressionSegment right = createInExpressionSegment(ctx.inExpr()); boolean not = null != ctx.NOT(); return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not); } @SuppressWarnings("unchecked") private ExpressionSegment createInExpressionSegment(final InExprContext ctx) { if (null != ctx.selectWithParens()) { PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens()); SubquerySegment subquerySegment = new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), select); return new SubqueryExpressionSegment(subquerySegment); } ListExpression result = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex()); result.getItems().addAll(((CollectionValue<ExpressionSegment>) visit(ctx.exprList())).getValue()); return result; } @SuppressWarnings("unchecked") @Override public ASTNode visitExprList(final 
ExprListContext ctx) { CollectionValue<ExpressionSegment> result = new CollectionValue<>(); if (null != ctx.exprList()) { result.combine((CollectionValue<ExpressionSegment>) visitExprList(ctx.exprList())); } result.getValue().add((ExpressionSegment) visit(ctx.aExpr())); return result; } private BetweenExpression createBetweenSegment(final AExprContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ExpressionSegment between = (ExpressionSegment) visit(ctx.bExpr()); ExpressionSegment and = (ExpressionSegment) visit(ctx.aExpr(1)); boolean not = null != ctx.NOT(); return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, between, and, not); } @Override public ASTNode visitBExpr(final BExprContext ctx) { if (null != ctx.cExpr()) { return visit(ctx.cExpr()); } if (null != ctx.TYPE_CAST_()) { return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.bExpr(0)), ctx.typeName().getText()); } if (null != ctx.qualOp()) { ExpressionSegment left = (ExpressionSegment) visit(ctx.bExpr(0)); ExpressionSegment right = (ExpressionSegment) visit(ctx.bExpr(1)); String operator = ctx.qualOp().getText(); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } for (BExprContext each : ctx.bExpr()) { visit(each); } return new LiteralExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } private ProjectionSegment createAggregationSegment(final FuncApplicationContext ctx, final String aggregationType, final Collection<ExpressionSegment> expressionSegments) { AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase()); String innerExpression = ctx.start.getInputStream().getText(new Interval(ctx.LP_().getSymbol().getStartIndex(), 
ctx.stop.getStopIndex())); if (null == ctx.DISTINCT()) { AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression); result.getParameters().addAll(expressionSegments); return result; } AggregationDistinctProjectionSegment result = new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression, getDistinctExpression(ctx)); result.getParameters().addAll(expressionSegments); return result; } private String getDistinctExpression(final FuncApplicationContext ctx) { StringBuilder result = new StringBuilder(); result.append(ctx.funcArgList().getText()); if (null != ctx.sortClause()) { result.append(ctx.sortClause().getText()); } return result.toString(); } @Override public final ASTNode visitDataTypeName(final DataTypeNameContext ctx) { IdentifierContext identifierContext = ctx.identifier(); if (null != identifierContext) { return new KeywordValue(identifierContext.getText()); } Collection<String> dataTypeNames = new LinkedList<>(); for (int i = 0; i < ctx.getChildCount(); i++) { dataTypeNames.add(ctx.getChild(i).getText()); } return new KeywordValue(String.join(" ", dataTypeNames)); } @Override public final ASTNode visitSortClause(final SortClauseContext ctx) { Collection<OrderByItemSegment> items = new LinkedList<>(); for (SortbyContext each : ctx.sortbyList().sortby()) { items.add((OrderByItemSegment) visit(each)); } return new OrderBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items); } @Override public final ASTNode visitSortby(final SortbyContext ctx) { OrderDirection orderDirection = null != ctx.ascDesc() ? 
generateOrderDirection(ctx.ascDesc()) : OrderDirection.ASC; NullsOrderType nullsOrderType = generateNullsOrderType(ctx.nullsOrder()); ASTNode expr = visit(ctx.aExpr()); if (expr instanceof ColumnSegment) { ColumnSegment column = (ColumnSegment) expr; return new ColumnOrderByItemSegment(column, orderDirection, nullsOrderType); } if (expr instanceof LiteralExpressionSegment) { LiteralExpressionSegment index = (LiteralExpressionSegment) expr; return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(), Integer.parseInt(index.getLiterals().toString()), orderDirection, nullsOrderType); } if (expr instanceof ExpressionSegment) { return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()), orderDirection, nullsOrderType, (ExpressionSegment) expr); } return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()), orderDirection, nullsOrderType); } private NullsOrderType generateNullsOrderType(final NullsOrderContext ctx) { if (null == ctx) { return null; } return null == ctx.FIRST() ? NullsOrderType.LAST : NullsOrderType.FIRST; } private OrderDirection generateOrderDirection(final AscDescContext ctx) { return null == ctx.DESC() ? 
OrderDirection.ASC : OrderDirection.DESC; } @Override public final ASTNode visitDataType(final DataTypeContext ctx) { DataTypeSegment result = new DataTypeSegment(); result.setDataTypeName(((KeywordValue) visit(ctx.dataTypeName())).getValue()); result.setStartIndex(ctx.start.getStartIndex()); result.setStopIndex(ctx.stop.getStopIndex()); if (null != ctx.dataTypeLength()) { DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.dataTypeLength()); result.setDataLength(dataTypeLengthSegment); } return result; } @Override public final ASTNode visitDataTypeLength(final DataTypeLengthContext ctx) { DataTypeLengthSegment result = new DataTypeLengthSegment(); result.setStartIndex(ctx.start.getStartIndex()); result.setStopIndex(ctx.stop.getStartIndex()); List<TerminalNode> numbers = ctx.NUMBER_(); if (1 == numbers.size()) { result.setPrecision(Integer.parseInt(numbers.get(0).getText())); } if (2 == numbers.size()) { result.setPrecision(Integer.parseInt(numbers.get(0).getText())); result.setScale(Integer.parseInt(numbers.get(1).getText())); } return result; } @Override public ASTNode visitInsert(final InsertContext ctx) { PostgreSQLInsertStatement result = (PostgreSQLInsertStatement) visit(ctx.insertRest()); result.setTable((SimpleTableSegment) visit(ctx.insertTarget())); if (null != ctx.optOnConflict()) { result.setOnDuplicateKeyColumnsSegment((OnDuplicateKeyColumnsSegment) visit(ctx.optOnConflict())); } if (null != ctx.returningClause()) { result.setReturningSegment((ReturningSegment) visit(ctx.returningClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitOptOnConflict(final OptOnConflictContext ctx) { SetClauseListContext setClauseListContext = ctx.setClauseList(); Collection<AssignmentSegment> assignments = ((SetAssignmentSegment) visit(setClauseListContext)).getAssignments(); return new OnDuplicateKeyColumnsSegment(ctx.getStart().getStartIndex(), 
ctx.getStop().getStopIndex(), assignments); } @Override public ASTNode visitInsertTarget(final InsertTargetContext ctx) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.qualifiedName()); if (null != ctx.AS()) { ColIdContext colId = ctx.colId(); result.setAlias(new AliasSegment(colId.start.getStartIndex(), colId.stop.getStopIndex(), new IdentifierValue(colId.getText()))); } return result; } @Override public ASTNode visitQualifiedNameList(final QualifiedNameListContext ctx) { CollectionValue<SimpleTableSegment> result = new CollectionValue<>(); if (null != ctx.qualifiedName()) { result.getValue().add((SimpleTableSegment) visit(ctx.qualifiedName())); } if (null != ctx.qualifiedNameList()) { result.combine((CollectionValue) visit(ctx.qualifiedNameList())); } return result; } @Override public ASTNode visitQualifiedName(final QualifiedNameContext ctx) { if (null != ctx.indirection()) { AttrNameContext attrName = ctx.indirection().indirectionEl().attrName(); TableNameSegment tableName = new TableNameSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText())); OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); SimpleTableSegment result = new SimpleTableSegment(tableName); if (null != ctx.indirection().indirection()) { OwnerSegment tableOwner = createTableOwner(ctx.indirection().indirection()); tableOwner.setOwner(owner); result.setOwner(tableOwner); } else { result.setOwner(owner); } return result; } return new SimpleTableSegment(new TableNameSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); } @Override public ASTNode visitInsertRest(final InsertRestContext ctx) { PostgreSQLInsertStatement result = new PostgreSQLInsertStatement(); if (null == ctx.insertColumnList()) { result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() 
- 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } else { InsertColumnListContext insertColumns = ctx.insertColumnList(); CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(insertColumns); InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(insertColumns.start.getStartIndex() - 1, insertColumns.stop.getStopIndex() + 1, columns.getValue()); result.setInsertColumns(insertColumnsSegment); } ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); if (null == valuesClause) { PostgreSQLSelectStatement selectStatement = (PostgreSQLSelectStatement) visit(ctx.select()); result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); } else { result.getValues().addAll(createInsertValuesSegments(valuesClause)); } return result; } @Override public ASTNode visitInsertColumnList(final InsertColumnListContext ctx) { CollectionValue<ColumnSegment> result = new CollectionValue<>(); if (null != ctx.insertColumnList()) { result.getValue().addAll(((CollectionValue<ColumnSegment>) visit(ctx.insertColumnList())).getValue()); } result.getValue().add((ColumnSegment) visit(ctx.insertColumnItem())); return result; } @Override public ASTNode visitInsertColumnItem(final InsertColumnItemContext ctx) { if (null == ctx.optIndirection().indirectionEl()) { return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); } ColumnSegment result = new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.optIndirection().stop.getStopIndex(), new IdentifierValue(ctx.optIndirection().indirectionEl().attrName().getText())); result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } private Collection<InsertValuesSegment> 
createInsertValuesSegments(final ValuesClauseContext ctx) { Collection<InsertValuesSegment> result = new LinkedList<>(); if (null != ctx.valuesClause()) { Collection<InsertValuesSegment> expressions = createInsertValuesSegments(ctx.valuesClause()); result.addAll(expressions); } Collection<ExpressionSegment> expressions = createInsertValuesSegments(ctx.exprList()); InsertValuesSegment insertValuesSegment = new InsertValuesSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), (List<ExpressionSegment>) expressions); result.add(insertValuesSegment); return result; } private Collection<ExpressionSegment> createInsertValuesSegments(final ExprListContext ctx) { Collection<ExpressionSegment> result = new LinkedList<>(); if (null != ctx.exprList()) { Collection<ExpressionSegment> tmpResult = createInsertValuesSegments(ctx.exprList()); result.addAll(tmpResult); } ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); result.add(expr); return result; } private Collection<AssignmentSegment> generateAssignmentSegments(final SetClauseListContext ctx) { Collection<AssignmentSegment> result = new LinkedList<>(); if (null != ctx.setClauseList()) { Collection<AssignmentSegment> tmpResult = generateAssignmentSegments(ctx.setClauseList()); result.addAll(tmpResult); } AssignmentSegment assignmentSegment = (AssignmentSegment) visit(ctx.setClause()); result.add(assignmentSegment); return result; } @Override public ASTNode visitSetClause(final SetClauseContext ctx) { ColumnSegment columnSegment = (ColumnSegment) visit(ctx.setTarget()); List<ColumnSegment> columnSegments = new LinkedList<>(); columnSegments.add(columnSegment); ExpressionSegment expressionSegment = (ExpressionSegment) visit(ctx.aExpr()); return new ColumnAssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), columnSegments, expressionSegment); } @Override public ASTNode visitSetTarget(final SetTargetContext ctx) { IdentifierValue identifierValue = new 
IdentifierValue(ctx.colId().getText()); return new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), identifierValue); } @Override public ASTNode visitRelationExprOptAlias(final RelationExprOptAliasContext ctx) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName()); if (null != ctx.colId()) { result.setAlias(new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); } return result; } @Override public ASTNode visitUpdate(final UpdateContext ctx) { PostgreSQLUpdateStatement result = new PostgreSQLUpdateStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); result.setSetAssignment((SetAssignmentSegment) visit(ctx.setClauseList())); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitSetClauseList(final SetClauseListContext ctx) { Collection<AssignmentSegment> assignments = generateAssignmentSegments(ctx); return new SetAssignmentSegment(ctx.start.getStartIndex() - 4, ctx.stop.getStopIndex(), assignments); } @Override public ASTNode visitDelete(final DeleteContext ctx) { PostgreSQLDeleteStatement result = new PostgreSQLDeleteStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitWhereOrCurrentClause(final WhereOrCurrentClauseContext ctx) { return visit(ctx.whereClause()); } @Override public ASTNode visitSelect(final SelectContext ctx) { PostgreSQLSelectStatement result = 
(PostgreSQLSelectStatement) visit(ctx.selectNoParens());
        result.getParameterMarkerSegments().addAll(getParameterMarkerSegments());
        return result;
    }

    // Select without parentheses: base select clause plus optional ORDER BY, LIMIT/OFFSET and FOR locking clauses.
    @Override
    public ASTNode visitSelectNoParens(final SelectNoParensContext ctx) {
        PostgreSQLSelectStatement result = (PostgreSQLSelectStatement) visit(ctx.selectClauseN());
        if (null != ctx.sortClause()) {
            OrderBySegment orderBySegment = (OrderBySegment) visit(ctx.sortClause());
            result.setOrderBy(orderBySegment);
        }
        if (null != ctx.selectLimit()) {
            LimitSegment limitSegment = (LimitSegment) visit(ctx.selectLimit());
            result.setLimit(limitSegment);
        }
        if (null != ctx.forLockingClause()) {
            LockSegment lockSegment = (LockSegment) visit(ctx.forLockingClause());
            result.setLock(lockSegment);
        }
        return result;
    }

    // FOR UPDATE / FOR SHARE clause: only the text span is recorded.
    @Override
    public ASTNode visitForLockingClause(final ForLockingClauseContext ctx) {
        return new LockSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
    }

    // Unwraps (possibly nested) parentheses around a select.
    @Override
    public ASTNode visitSelectWithParens(final SelectWithParensContext ctx) {
        if (null != ctx.selectWithParens()) {
            return visit(ctx.selectWithParens());
        }
        return visit(ctx.selectNoParens());
    }

    // Select clause: a simple select, or a UNION/INTERSECT/EXCEPT combination of two select clauses.
    @Override
    public ASTNode visitSelectClauseN(final SelectClauseNContext ctx) {
        if (null != ctx.simpleSelect()) {
            return visit(ctx.simpleSelect());
        }
        if (null != ctx.selectClauseN() && !ctx.selectClauseN().isEmpty()) {
            PostgreSQLSelectStatement result = new PostgreSQLSelectStatement();
            PostgreSQLSelectStatement left = (PostgreSQLSelectStatement) visit(ctx.selectClauseN(0));
            result.setProjections(left.getProjections());
            result.setFrom(left.getFrom());
            // The combine segment starts at the set-operator keyword (child 1) and runs to the end of the statement.
            CombineSegment combineSegment = new CombineSegment(((TerminalNode) ctx.getChild(1)).getSymbol().getStartIndex(),
                    ctx.getStop().getStopIndex(), left, getCombineType(ctx), (PostgreSQLSelectStatement) visit(ctx.selectClauseN(1)));
            result.setCombine(combineSegment);
            return result;
        }
        return visit(ctx.selectWithParens());
    }

    // Maps UNION/INTERSECT/EXCEPT [ALL|DISTINCT] to a combine type; a missing ALL/DISTINCT defaults to DISTINCT semantics.
    private CombineType getCombineType(final SelectClauseNContext ctx) {
        boolean isDistinct = null == ctx.allOrDistinct() || null != ctx.allOrDistinct().DISTINCT();
        if (null != ctx.UNION()) {
            return isDistinct ? CombineType.UNION : CombineType.UNION_ALL;
        }
        if (null != ctx.INTERSECT()) {
            return isDistinct ? CombineType.INTERSECT : CombineType.INTERSECT_ALL;
        }
        return isDistinct ? CombineType.EXCEPT : CombineType.EXCEPT_ALL;
    }

    // Core SELECT: projections, FROM, WHERE, GROUP BY, HAVING and WINDOW clauses.
    @Override
    public ASTNode visitSimpleSelect(final SimpleSelectContext ctx) {
        PostgreSQLSelectStatement result = new PostgreSQLSelectStatement();
        if (null == ctx.targetList()) {
            // No target list: placeholder projections segment with invalid (-1) indexes.
            result.setProjections(new ProjectionsSegment(-1, -1));
        } else {
            ProjectionsSegment projects = (ProjectionsSegment) visit(ctx.targetList());
            if (null != ctx.distinctClause()) {
                projects.setDistinctRow(true);
            }
            result.setProjections(projects);
        }
        if (null != ctx.fromClause()) {
            TableSegment tableSegment = (TableSegment) visit(ctx.fromClause());
            result.setFrom(tableSegment);
        }
        if (null != ctx.whereClause()) {
            result.setWhere((WhereSegment) visit(ctx.whereClause()));
        }
        if (null != ctx.groupClause()) {
            result.setGroupBy((GroupBySegment) visit(ctx.groupClause()));
        }
        if (null != ctx.havingClause()) {
            result.setHaving((HavingSegment) visit(ctx.havingClause()));
        }
        if (null != ctx.windowClause()) {
            result.setWindow((WindowSegment) visit(ctx.windowClause()));
        }
        return result;
    }

    // HAVING clause with its predicate expression.
    @Override
    public ASTNode visitHavingClause(final HavingClauseContext ctx) {
        ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr());
        return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr);
    }

    // WINDOW clause: only the text span is recorded.
    @Override
    public ASTNode visitWindowClause(final WindowClauseContext ctx) {
        return new WindowSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
    }

    // GROUP BY clause: each group-by item is represented as an order-by item segment.
    @Override
    public ASTNode visitGroupClause(final GroupClauseContext ctx) {
        Collection<OrderByItemSegment> items = new LinkedList<>();
        for (GroupByItemContext each : ctx.groupByList().groupByItem()) {
            items.add((OrderByItemSegment) visit(each));
        }
        return new GroupBySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), items);
    }

    // GROUP BY item: column reference, ordinal literal, or general expression (fallback keeps the raw text).
    @Override
    public ASTNode visitGroupByItem(final GroupByItemContext ctx) {
        if (null != ctx.aExpr()) {
            ASTNode astNode = visit(ctx.aExpr());
            if (astNode instanceof ColumnSegment) {
                return new ColumnOrderByItemSegment((ColumnSegment) astNode, OrderDirection.ASC, null);
            }
            if (astNode instanceof LiteralExpressionSegment) {
                LiteralExpressionSegment index = (LiteralExpressionSegment) astNode;
                return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(),
                        Integer.parseInt(index.getLiterals().toString()), OrderDirection.ASC, null);
            }
            return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null);
        }
        return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null);
    }

    // Projection list; the grammar rule is left-recursive, so the nested list is flattened first.
    @Override
    public ASTNode visitTargetList(final TargetListContext ctx) {
        ProjectionsSegment result = new ProjectionsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
        if (null != ctx.targetList()) {
            ProjectionsSegment projections = (ProjectionsSegment) visit(ctx.targetList());
            result.getProjections().addAll(projections.getProjections());
        }
        ProjectionSegment projection = (ProjectionSegment) visit(ctx.targetEl());
        result.getProjections().add(projection);
        return result;
    }

    // Single projection element with optional alias.
    @Override
    public ASTNode visitTargetEl(final TargetElContext ctx) {
        ProjectionSegment result = createProjectionSegment(ctx, ctx.aExpr());
        if (null != ctx.identifier()) {
            ((AliasAvailable) result).setAlias(new AliasSegment(ctx.identifier().start.getStartIndex(), ctx.identifier().stop.getStopIndex(),
                    new IdentifierValue(ctx.identifier().getText())));
        }
        return result;
    }

    // Dispatches a projection element: *, owner.*, an expression, or a raw-text fallback.
    private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr) {
        if (null != ctx.ASTERISK_()) {
            return new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
        }
        if (null != ctx.DOT_ASTERISK_()) {
            ShorthandProjectionSegment result = new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
            result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())));
            return result;
        }
        if (null != ctx.aExpr()) {
            ASTNode projection = visit(ctx.aExpr());
            return createProjectionSegment(ctx, expr, projection);
        }
        return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null);
    }

    // Wraps an already-visited expression node into the matching projection segment type.
    private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr, final ASTNode projection) {
        if (projection instanceof ColumnSegment) {
            return new ColumnProjectionSegment((ColumnSegment) projection);
        }
        if (projection instanceof AggregationProjectionSegment) {
            return (AggregationProjectionSegment) projection;
        }
        if (projection instanceof SubqueryExpressionSegment) {
            SubqueryExpressionSegment subqueryExpression = (SubqueryExpressionSegment) projection;
            // Keep the subquery's original text for the projection.
            String text = ctx.start.getInputStream().getText(new Interval(subqueryExpression.getStartIndex(), subqueryExpression.getStopIndex()));
            return new SubqueryProjectionSegment(subqueryExpression.getSubquery(), text);
        }
        if (projection instanceof ExistsSubqueryExpression) {
            ExistsSubqueryExpression existsSubqueryExpression = (ExistsSubqueryExpression) projection;
            String text = ctx.start.getInputStream().getText(new Interval(existsSubqueryExpression.getStartIndex(), existsSubqueryExpression.getStopIndex()));
            return new SubqueryProjectionSegment(existsSubqueryExpression.getSubquery(), text);
        }
        if (projection instanceof ExpressionSegment) {
            return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), (ExpressionSegment) projection);
        }
        return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null);
    }

    // FROM clause delegates to the table list.
    @Override
    public ASTNode visitFromClause(final FromClauseContext
ctx) {
        return visit(ctx.fromList());
    }

    // FROM list; comma-separated tables become a left-deep chain of COMMA joins.
    @Override
    public ASTNode visitFromList(final FromListContext ctx) {
        if (null != ctx.fromList()) {
            JoinTableSegment result = new JoinTableSegment();
            result.setStartIndex(ctx.start.getStartIndex());
            result.setStopIndex(ctx.stop.getStopIndex());
            result.setLeft((TableSegment) visit(ctx.fromList()));
            result.setRight((TableSegment) visit(ctx.tableReference()));
            result.setJoinType(JoinType.COMMA.name());
            return result;
        }
        return visit(ctx.tableReference());
    }

    // Table reference: plain relation, subquery, or a joined table; unsupported forms get a "not support" placeholder.
    @Override
    public ASTNode visitTableReference(final TableReferenceContext ctx) {
        if (null != ctx.relationExpr()) {
            SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName());
            if (null != ctx.aliasClause()) {
                result.setAlias((AliasSegment) visit(ctx.aliasClause()));
            }
            return result;
        }
        if (null != ctx.selectWithParens()) {
            PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens());
            SubquerySegment subquery = new SubquerySegment(ctx.selectWithParens().start.getStartIndex(), ctx.selectWithParens().stop.getStopIndex(), select);
            AliasSegment alias = null != ctx.aliasClause() ? (AliasSegment) visit(ctx.aliasClause()) : null;
            SubqueryTableSegment result = new SubqueryTableSegment(subquery);
            result.setAlias(alias);
            return result;
        }
        if (null == ctx.tableReference()) {
            // Grammar alternative not handled here: emit a placeholder table name.
            TableNameSegment tableName = new TableNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue("not support"));
            return new SimpleTableSegment(tableName);
        }
        JoinTableSegment result = new JoinTableSegment();
        result.setLeft((TableSegment) visit(ctx.tableReference()));
        int startIndex = null != ctx.LP_() ? ctx.LP_().getSymbol().getStartIndex() : ctx.tableReference().start.getStartIndex();
        // NOTE(review): stopIndex stays 0 when an alias clause is present, and startIndex is then taken
        // from a *stop* index — this looks suspicious but is preserved as-is; verify against callers before changing.
        int stopIndex = 0;
        AliasSegment alias = null;
        if (null == ctx.aliasClause()) {
            stopIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.tableReference().start.getStopIndex();
        } else {
            alias = (AliasSegment) visit(ctx.aliasClause());
            startIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.joinedTable().stop.getStopIndex();
        }
        result.setStartIndex(startIndex);
        result.setStopIndex(stopIndex);
        result = visitJoinedTable(ctx.joinedTable(), result);
        result.setAlias(alias);
        return result;
    }

    // Fills the right side, join type, NATURAL flag and join qualifier of a join segment.
    private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final JoinTableSegment tableSegment) {
        TableSegment right = (TableSegment) visit(ctx.tableReference());
        tableSegment.setRight(right);
        tableSegment.setJoinType(getJoinType(ctx));
        tableSegment.setNatural(null != ctx.naturalJoinType());
        return null != ctx.joinQual() ? visitJoinQual(ctx.joinQual(), tableSegment) : tableSegment;
    }

    // Resolves the join type keyword; defaults to COMMA when none matched.
    private String getJoinType(final JoinedTableContext ctx) {
        if (null != ctx.crossJoinType()) {
            return JoinType.CROSS.name();
        }
        if (null != ctx.innerJoinType()) {
            return JoinType.INNER.name();
        }
        if (null != ctx.outerJoinType()) {
            return getOutJoinType(ctx.outerJoinType());
        }
        if (null != ctx.naturalJoinType()) {
            return getNaturalJoinType(ctx.naturalJoinType());
        }
        return JoinType.COMMA.name();
    }

    // NATURAL [INNER|FULL|LEFT|RIGHT] JOIN; a bare NATURAL JOIN is INNER.
    private static String getNaturalJoinType(final NaturalJoinTypeContext ctx) {
        if (null != ctx.INNER()) {
            return JoinType.INNER.name();
        }
        if (null != ctx.FULL()) {
            return JoinType.FULL.name();
        }
        if (null != ctx.LEFT()) {
            return JoinType.LEFT.name();
        }
        if (null != ctx.RIGHT()) {
            return JoinType.RIGHT.name();
        }
        return JoinType.INNER.name();
    }

    // Outer join type: FULL, LEFT or RIGHT.
    private static String getOutJoinType(final OuterJoinTypeContext ctx) {
        if (null == ctx.FULL()) {
            return null != ctx.LEFT() ? JoinType.LEFT.name() : JoinType.RIGHT.name();
        }
        return JoinType.FULL.name();
    }

    // Join qualifier: ON condition or USING column list.
    private JoinTableSegment visitJoinQual(final JoinQualContext ctx, final JoinTableSegment joinTableSource) {
        if (null != ctx.aExpr()) {
            ExpressionSegment condition = (ExpressionSegment) visit(ctx.aExpr());
            joinTableSource.setCondition(condition);
        }
        if (null != ctx.USING()) {
            joinTableSource.setUsing(generateUsingColumn(ctx.nameList()));
        }
        return joinTableSource;
    }

    // Recursively flattens the left-recursive name list of a USING clause into column segments.
    private List<ColumnSegment> generateUsingColumn(final NameListContext ctx) {
        List<ColumnSegment> result = new ArrayList<>();
        if (null != ctx.nameList()) {
            result.addAll(generateUsingColumn(ctx.nameList()));
        }
        if (null != ctx.name()) {
            result.add(new ColumnSegment(ctx.name().start.getStartIndex(), ctx.name().stop.getStopIndex(), new IdentifierValue(ctx.name().getText())));
        }
        return result;
    }

    // Alias clause; a column alias list "alias (a, b)" is folded into the alias text.
    @Override
    public ASTNode visitAliasClause(final AliasClauseContext ctx) {
        StringBuilder aliasName = new StringBuilder(ctx.colId().getText());
        if (null != ctx.nameList()) {
            aliasName.append(ctx.LP_().getText());
            aliasName.append(ctx.nameList().getText());
            aliasName.append(ctx.RP_().getText());
        }
        return new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(aliasName.toString()));
    }

    // Builds an owner segment from the attribute name of an indirection (e.g. schema qualifier).
    private OwnerSegment createTableOwner(final IndirectionContext ctx) {
        AttrNameContext attrName = ctx.indirectionEl().attrName();
        return new OwnerSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText()));
    }

    // WHERE clause with its predicate expression.
    @Override
    public ASTNode visitWhereClause(final WhereClauseContext ctx) {
        ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr());
        return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr);
    }

    // LIMIT/OFFSET/FETCH: both clauses present vs. only one of them.
    @Override
    public ASTNode visitSelectLimit(final SelectLimitContext ctx) {
        if (null != ctx.limitClause() && null != ctx.offsetClause()) {
            return createLimitSegmentWhenLimitAndOffset(ctx);
        }
        return
createLimitSegmentWhenRowCountOrOffsetAbsent(ctx);
    }

    /**
     * Visit a LIMIT value: {@code LIMIT ALL} yields no segment (null), a parameter marker yields a
     * parameter-marker limit value, otherwise the numeric literal row count is parsed.
     */
    @Override
    public ASTNode visitSelectLimitValue(final SelectLimitValueContext ctx) {
        if (null != ctx.ALL()) {
            return null;
        }
        ASTNode astNode = visit(ctx.aExpr());
        if (astNode instanceof ParameterMarkerExpressionSegment) {
            return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                    ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex());
        }
        return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString()));
    }

    /**
     * Visit an OFFSET value: parameter marker or numeric literal.
     */
    @Override
    public ASTNode visitSelectOffsetValue(final SelectOffsetValueContext ctx) {
        ASTNode astNode = visit(ctx.aExpr());
        if (astNode instanceof ParameterMarkerExpressionSegment) {
            return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                    ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex());
        }
        return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString()));
    }

    /**
     * Visit a FETCH FIRST value: parameter marker or numeric literal, falling back to the bare NUMBER_ token.
     */
    @Override
    public ASTNode visitSelectFetchFirstValue(final SelectFetchFirstValueContext ctx) {
        ASTNode astNode = visit(ctx.cExpr());
        if (null != astNode) {
            // Bugfix: visiting a cExpr produces a ParameterMarkerExpressionSegment, never a
            // ParameterMarkerLimitValueSegment. The old check (instanceof ParameterMarkerLimitValueSegment)
            // could never match while the cast below assumed ParameterMarkerExpressionSegment, so parameter
            // markers fell through to the literal branch and caused a ClassCastException. The check now
            // matches the cast, consistent with visitSelectLimitValue/visitSelectOffsetValue above.
            if (astNode instanceof ParameterMarkerExpressionSegment) {
                return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                        ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex());
            }
            return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                    Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString()));
        }
        return visit(ctx.NUMBER_());
    }

    // Builds the limit segment when both LIMIT and OFFSET are present; PostgreSQL allows either clause order.
    private LimitSegment createLimitSegmentWhenLimitAndOffset(final SelectLimitContext ctx) {
        ParseTree astNode0 = ctx.getChild(0);
        LimitValueSegment rowCount = null;
        LimitValueSegment offset = null;
        if (astNode0 instanceof
LimitClauseContext) {
            rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue());
        } else {
            offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue());
        }
        ParseTree astNode1 = ctx.getChild(1);
        if (astNode1 instanceof LimitClauseContext) {
            rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue());
        } else {
            offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue());
        }
        return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, rowCount);
    }

    // Builds the limit segment when only one of LIMIT / OFFSET is present (FETCH FIRST counts as LIMIT).
    private LimitSegment createLimitSegmentWhenRowCountOrOffsetAbsent(final SelectLimitContext ctx) {
        if (null != ctx.limitClause()) {
            if (null != ctx.limitClause().selectFetchFirstValue()) {
                LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectFetchFirstValue());
                return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit);
            }
            LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectLimitValue());
            return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit);
        }
        LimitValueSegment offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue());
        return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, null);
    }

    // EXECUTE statement: no further detail is extracted.
    @Override
    public ASTNode visitExecuteStmt(final ExecuteStmtContext ctx) {
        return new PostgreSQLExecuteStatement();
    }

    /**
     * Get original text.
     *
     * @param ctx context
     * @return original text
     */
    protected String getOriginalText(final ParserRuleContext ctx) {
        return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    }

    // Qualified name: collects the trailing attrs first, then the leading colId.
    @Override
    @SuppressWarnings("unchecked")
    public ASTNode visitAnyName(final AnyNameContext ctx) {
        CollectionValue<NameSegment> result = new CollectionValue<>();
        if (null != ctx.attrs()) {
            result.combine((CollectionValue<NameSegment>) visit(ctx.attrs()));
        }
        result.getValue().add(new NameSegment(ctx.colId().getStart().getStartIndex(), ctx.colId().getStop().getStopIndex(), new IdentifierValue(ctx.colId().getText())));
        return result;
    }

    // Attribute chain (".a.b"): recursively collects each attribute name.
    @Override
    @SuppressWarnings("unchecked")
    public ASTNode visitAttrs(final AttrsContext ctx) {
        CollectionValue<NameSegment> result = new CollectionValue<>();
        result.getValue().add(new NameSegment(ctx.attrName().getStart().getStartIndex(), ctx.attrName().getStop().getStopIndex(), new IdentifierValue(ctx.attrName().getText())));
        if (null != ctx.attrs()) {
            result.combine((CollectionValue<NameSegment>) visit(ctx.attrs()));
        }
        return result;
    }

    // Plain name delegates to identifier.
    @Override
    public ASTNode visitName(final NameContext ctx) {
        return visit(ctx.identifier());
    }

    // Signed integer constant as a number literal value.
    @Override
    public ASTNode visitSignedIconst(final SignedIconstContext ctx) {
        return new NumberLiteralValue(ctx.getText());
    }
}
// Base visitor that turns a PostgreSQL ANTLR parse tree into ShardingSphere AST segments.
class PostgreSQLStatementSQLVisitor extends PostgreSQLStatementParserBaseVisitor<ASTNode> {

    // Parameter markers ("?" / "$n") collected in encounter order while visiting expressions.
    private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>();

    // Properties are accepted for construction uniformity; no configuration is currently read from them.
    public PostgreSQLStatementSQLVisitor(final Properties props) {
    }

    // "?" markers are numbered by encounter order; "$n" markers carry an explicit 1-based index.
    @Override
    public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) {
        if (null == ctx.DOLLAR_()) {
            return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION);
        }
        return new ParameterMarkerValue(new NumberLiteralValue(ctx.NUMBER_().getText()).getValue().intValue() - 1, ParameterMarkerType.DOLLAR);
    }

    // Number literal token to value.
    @Override
    public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) {
        return new NumberLiteralValue(ctx.NUMBER_().getText());
    }

    // Identifier: unreserved keywords are visited, everything else is taken verbatim.
    @Override
    public final ASTNode visitIdentifier(final IdentifierContext ctx) {
        UnreservedWordContext unreservedWord = ctx.unreservedWord();
        return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText());
    }

    // Unreserved keyword used as an identifier.
    @Override
    public final ASTNode visitUnreservedWord(final UnreservedWordContext ctx) {
        return new IdentifierValue(ctx.getText());
    }

    // Schema name delegates to identifier.
    @Override
    public final ASTNode visitSchemaName(final SchemaNameContext ctx) {
        return visit(ctx.identifier());
    }

    // Table name with optional owner (schema) qualifier.
    @Override
    public final ASTNode visitTableName(final TableNameContext ctx) {
        SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(), ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())));
        OwnerContext owner = ctx.owner();
        if (null != owner) {
            result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
        }
        return result;
    }

    // Column name with optional owner (table) qualifier.
    @Override
    public final ASTNode visitColumnName(final ColumnNameContext ctx) {
        ColumnSegment result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.name()));
        OwnerContext owner = ctx.owner();
        if (null != owner) {
            result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
        }
        return result;
    }

    // Index name wrapped into an index segment.
    @Override
    public final ASTNode visitIndexName(final IndexNameContext ctx) {
        IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
        return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName);
    }

    // Constraint name segment.
    @Override
    public final ASTNode visitConstraintName(final ConstraintNameContext ctx) {
        return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
    }

    // Collection of table names.
    @Override
    public final ASTNode visitTableNames(final TableNamesContext ctx) {
        CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
        for (TableNameContext each : ctx.tableName()) {
            result.getValue().add((SimpleTableSegment) visit(each));
        }
        return result;
    }

    // Collection of column names.
    @Override
    public final ASTNode visitColumnNames(final ColumnNamesContext ctx) {
        CollectionValue<ColumnSegment> result = new CollectionValue<>();
        for (ColumnNameContext each : ctx.columnName()) {
            result.getValue().add((ColumnSegment) visit(each));
        }
        return result;
    }

    // General expression: casts, BETWEEN, IN, pattern matching, binary operators, or raw-text fallback.
    @Override
    public ASTNode visitAExpr(final AExprContext ctx) {
        if (null != ctx.cExpr()) {
            return visit(ctx.cExpr());
        }
        if (null != ctx.TYPE_CAST_()) {
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText());
        }
        if (null != ctx.BETWEEN()) {
            return createBetweenSegment(ctx);
        }
        if (null != ctx.IN()) {
            return createInSegment(ctx);
        }
        if (null != ctx.patternMatchingOperator()) {
            return createPatternMatchingOperationSegment(ctx);
        }
        Optional<String> binaryOperator = findBinaryOperator(ctx);
        if (binaryOperator.isPresent()) {
            return createBinaryOperationSegment(ctx, binaryOperator.get());
        }
        // Visit children for side effects (e.g. parameter marker collection) before falling back to raw text.
        super.visitAExpr(ctx);
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text);
    }

    // Detects the binary operator of an expression, if any; ISNULL is normalized to "IS".
    private Optional<String> findBinaryOperator(final AExprContext ctx) {
        if (null != ctx.IS()) {
            return Optional.of(ctx.IS().getText());
        }
        if (null != ctx.ISNULL()) {
            return Optional.of("IS");
        }
        if (1 == ctx.aExpr().size()) {
            return Optional.empty();
        }
        if (null != ctx.comparisonOperator()) {
            return Optional.of(ctx.comparisonOperator().getText());
        }
        if (null != ctx.andOperator()) {
            return Optional.of(ctx.andOperator().getText());
        }
        if (null != ctx.orOperator()) {
            return Optional.of(ctx.orOperator().getText());
        }
        if (null != ctx.PLUS_()) {
            return Optional.of(ctx.PLUS_().getText());
        }
        if (null != ctx.MINUS_()) {
            return Optional.of(ctx.MINUS_().getText());
        }
        if (null != ctx.ASTERISK_()) {
            return Optional.of(ctx.ASTERISK_().getText());
        }
        if (null != ctx.SLASH_()) {
            return Optional.of(ctx.SLASH_().getText());
        }
        return Optional.empty();
    }

    // Builds a binary operation; "IS"/"ISNULL" take the raw right-hand text (e.g. "NOT NULL") as a literal.
    private BinaryOperationExpression createBinaryOperationSegment(final AExprContext ctx, final String operator) {
        if ("IS".equalsIgnoreCase(operator)) {
            ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
            String rightText;
            ExpressionSegment right;
            if (null != ctx.IS()) {
                // +2 skips the keyword-terminating position and the following space.
                rightText = ctx.start.getInputStream().getText(new Interval(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex())).trim();
                right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText);
            } else {
                rightText = ctx.start.getInputStream().getText(new Interval(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex())).trim();
                right = new LiteralExpressionSegment(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex(), rightText);
            }
            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, "IS",
                    ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())));
        }
        ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
        ExpressionSegment right = (ExpressionSegment) visit(ctx.aExpr(1));
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
    }

    // Primary expression: column, parameter marker, constant, nested expression, function, subquery or CASE.
    @Override
    public ASTNode visitCExpr(final CExprContext ctx) {
        if (null != ctx.columnref()) {
            return visit(ctx.columnref());
        }
        if (null != ctx.parameterMarker()) {
            ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker());
            ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), parameterMarker.getValue(), parameterMarker.getType());
            // Record the marker so later markers get the right ordinal and statements can expose them.
            parameterMarkerSegments.add(result);
            return result;
        }
        if (null != ctx.aexprConst()) {
            return visit(ctx.aexprConst());
        }
        if (null != ctx.aExpr()) {
            return visit(ctx.aExpr());
        }
        if (null != ctx.funcExpr()) {
            return visit(ctx.funcExpr());
        }
        if (null != ctx.selectWithParens()) {
            return createSubqueryExpressionSegment(ctx);
        }
        if (null != ctx.caseExpr()) {
            return visit(ctx.caseExpr());
        }
        super.visitCExpr(ctx);
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), text);
    }

    // Subquery expression; EXISTS (...) gets its own expression type.
    private ExpressionSegment createSubqueryExpressionSegment(final CExprContext ctx) {
        SubquerySegment subquerySegment = new SubquerySegment(ctx.selectWithParens().getStart().getStartIndex(),
                ctx.selectWithParens().getStop().getStopIndex(), (PostgreSQLSelectStatement) visit(ctx.selectWithParens()));
        return null == ctx.EXISTS() ?
new SubqueryExpressionSegment(subquerySegment) : new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment);
    }

    // CASE expression: parallel WHEN/THEN lists plus optional CASE argument and ELSE default.
    @Override
    public ASTNode visitCaseExpr(final CaseExprContext ctx) {
        Collection<ExpressionSegment> whenExprs = new LinkedList<>();
        Collection<ExpressionSegment> thenExprs = new LinkedList<>();
        for (WhenClauseContext each : ctx.whenClauseList().whenClause()) {
            whenExprs.add((ExpressionSegment) visit(each.aExpr(0)));
            thenExprs.add((ExpressionSegment) visit(each.aExpr(1)));
        }
        ExpressionSegment caseExpr = null == ctx.caseArg() ? null : (ExpressionSegment) visit(ctx.caseArg().aExpr());
        ExpressionSegment elseExpr = null == ctx.caseDefault() ? null : (ExpressionSegment) visit(ctx.caseDefault().aExpr());
        return new CaseWhenExpression(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), caseExpr, whenExprs, thenExprs, elseExpr);
    }

    // Function expression: aggregation functions get aggregation segments, the rest become plain function segments.
    @Override
    public ASTNode visitFuncExpr(final FuncExprContext ctx) {
        if (null != ctx.functionExprCommonSubexpr()) {
            return visit(ctx.functionExprCommonSubexpr());
        }
        Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class));
        String aggregationType = ctx.funcApplication().funcName().getText();
        if (AggregationType.isAggregationType(aggregationType)) {
            return createAggregationSegment(ctx.funcApplication(), aggregationType, expressionSegments);
        }
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.funcApplication().funcName().getText(), getOriginalText(ctx));
        result.getParameters().addAll(expressionSegments);
        return result;
    }

    // Common function-like subexpressions; CAST(... AS type) becomes a type-cast expression.
    @Override
    public ASTNode visitFunctionExprCommonSubexpr(final FunctionExprCommonSubexprContext ctx) {
        if (null != ctx.CAST()) {
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText());
        }
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getChild(0).getText(), getOriginalText(ctx));
        Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class));
        result.getParameters().addAll(expressionSegments);
        return result;
    }

    // Depth-first collection of all descendant contexts of the given type.
    private <T extends ParseTree> Collection<T> getTargetRuleContextFromParseTree(final ParseTree parseTree, final Class<? extends T> clazz) {
        Collection<T> result = new LinkedList<>();
        for (int index = 0; index < parseTree.getChildCount(); index++) {
            ParseTree child = parseTree.getChild(index);
            if (clazz.isInstance(child)) {
                result.add(clazz.cast(child));
            } else {
                result.addAll(getTargetRuleContextFromParseTree(child, clazz));
            }
        }
        return result;
    }

    // Visits each expression context and collects the resulting segments.
    private Collection<ExpressionSegment> getExpressionSegments(final Collection<AExprContext> aExprContexts) {
        Collection<ExpressionSegment> result = new LinkedList<>();
        for (AExprContext each : aExprContexts) {
            result.add((ExpressionSegment) visit(each));
        }
        return result;
    }

    // Constant expression: number, string, boolean, NULL or other; a leading type name makes it a typed cast.
    @Override
    public ASTNode visitAexprConst(final AexprConstContext ctx) {
        LiteralValue<?> value;
        if (null != ctx.numberConst()) {
            value = new NumberLiteralValue(ctx.numberConst().getText());
        } else if (null != ctx.STRING_()) {
            value = new StringLiteralValue(ctx.STRING_().getText());
        } else if (null != ctx.FALSE()) {
            value = new BooleanLiteralValue(ctx.FALSE().getText());
        } else if (null != ctx.TRUE()) {
            value = new BooleanLiteralValue(ctx.TRUE().getText());
        } else if (null != ctx.NULL()) {
            value = new NullLiteralValue(ctx.getText());
        } else {
            value = new OtherLiteralValue(ctx.getText());
        }
        if (null != ctx.constTypeName() || null != ctx.funcName() && null == ctx.LP_()) {
            // Typed constant such as DATE '2000-01-01'.
            // NOTE(review): STRING_() is dereferenced unconditionally here — presumably the grammar guarantees a
            // string token in this alternative; confirm before relying on it.
            LiteralExpressionSegment expression = new LiteralExpressionSegment(ctx.STRING_().getSymbol().getStartIndex(), ctx.STRING_().getSymbol().getStopIndex(), value.getValue().toString());
            String dataType = null != ctx.constTypeName() ? ctx.constTypeName().getText() : ctx.funcName().getText();
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), expression, dataType);
        }
        return SQLUtils.createLiteralExpression(value, ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText());
    }

    // Column reference: "owner.attr" via indirection, or a bare column id.
    @Override
    public ASTNode visitColumnref(final ColumnrefContext ctx) {
        if (null != ctx.indirection()) {
            AttrNameContext attrName = ctx.indirection().indirectionEl().attrName();
            ColumnSegment result = new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(attrName.getText()));
            OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()));
            result.setOwner(owner);
            return result;
        }
        return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()));
    }

    // IN predicate with optional NOT.
    private InExpression createInSegment(final AExprContext ctx) {
        ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
        ExpressionSegment right = createInExpressionSegment(ctx.inExpr());
        boolean not = null != ctx.NOT();
        return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not);
    }

    // Right side of IN: a subquery, or a parenthesized expression list.
    @SuppressWarnings("unchecked")
    private ExpressionSegment createInExpressionSegment(final InExprContext ctx) {
        if (null != ctx.selectWithParens()) {
            PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens());
            SubquerySegment subquerySegment = new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), select);
            return new SubqueryExpressionSegment(subquerySegment);
        }
        ListExpression result = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex());
        result.getItems().addAll(((CollectionValue<ExpressionSegment>) visit(ctx.exprList())).getValue());
        return result;
    }

    // Left-recursive expression list flattened into a collection.
    @SuppressWarnings("unchecked")
    @Override
    public ASTNode visitExprList(final ExprListContext ctx) {
        CollectionValue<ExpressionSegment> result = new CollectionValue<>();
        if (null != ctx.exprList()) {
            result.combine((CollectionValue<ExpressionSegment>) visitExprList(ctx.exprList()));
        }
        result.getValue().add((ExpressionSegment) visit(ctx.aExpr()));
        return result;
    }

    // BETWEEN predicate with optional NOT.
    private BetweenExpression createBetweenSegment(final AExprContext ctx) {
        ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
        ExpressionSegment between = (ExpressionSegment) visit(ctx.bExpr());
        ExpressionSegment and = (ExpressionSegment) visit(ctx.aExpr(1));
        boolean not = null != ctx.NOT();
        return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, between, and, not);
    }

    // Restricted expression (b_expr): primary, cast, qualified operator, or raw-text fallback.
    @Override
    public ASTNode visitBExpr(final BExprContext ctx) {
        if (null != ctx.cExpr()) {
            return visit(ctx.cExpr());
        }
        if (null != ctx.TYPE_CAST_()) {
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.bExpr(0)), ctx.typeName().getText());
        }
        if (null != ctx.qualOp()) {
            ExpressionSegment left = (ExpressionSegment) visit(ctx.bExpr(0));
            ExpressionSegment right = (ExpressionSegment) visit(ctx.bExpr(1));
            String operator = ctx.qualOp().getText();
            String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
        }
        // Visit children for side effects, then fall back to the raw text.
        for (BExprContext each : ctx.bExpr()) {
            visit(each);
        }
        return new LiteralExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText());
    }

    // Aggregation call; DISTINCT produces a distinct-aggregation segment carrying the distinct expression text.
    private ProjectionSegment createAggregationSegment(final FuncApplicationContext ctx, final String aggregationType, final Collection<ExpressionSegment> expressionSegments) {
        AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase());
        String innerExpression = ctx.start.getInputStream().getText(new Interval(ctx.LP_().getSymbol().getStartIndex(),
ctx.stop.getStopIndex()));
        if (null == ctx.DISTINCT()) {
            AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression);
            result.getParameters().addAll(expressionSegments);
            return result;
        }
        AggregationDistinctProjectionSegment result = new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression, getDistinctExpression(ctx));
        result.getParameters().addAll(expressionSegments);
        return result;
    }

    // Text of the DISTINCT argument list, including any trailing ORDER BY inside the aggregate.
    private String getDistinctExpression(final FuncApplicationContext ctx) {
        StringBuilder result = new StringBuilder();
        result.append(ctx.funcArgList().getText());
        if (null != ctx.sortClause()) {
            result.append(ctx.sortClause().getText());
        }
        return result.toString();
    }

    // Data type name: single identifier, or all child tokens joined (e.g. "double precision").
    @Override
    public final ASTNode visitDataTypeName(final DataTypeNameContext ctx) {
        IdentifierContext identifierContext = ctx.identifier();
        if (null != identifierContext) {
            return new KeywordValue(identifierContext.getText());
        }
        Collection<String> dataTypeNames = new LinkedList<>();
        for (int i = 0; i < ctx.getChildCount(); i++) {
            dataTypeNames.add(ctx.getChild(i).getText());
        }
        return new KeywordValue(String.join(" ", dataTypeNames));
    }

    // ORDER BY clause with its item list.
    @Override
    public final ASTNode visitSortClause(final SortClauseContext ctx) {
        Collection<OrderByItemSegment> items = new LinkedList<>();
        for (SortbyContext each : ctx.sortbyList().sortby()) {
            items.add((OrderByItemSegment) visit(each));
        }
        return new OrderBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items);
    }

    // Single ORDER BY item: column, ordinal literal, or expression, with direction and NULLS ordering.
    @Override
    public final ASTNode visitSortby(final SortbyContext ctx) {
        OrderDirection orderDirection = null != ctx.ascDesc() ? generateOrderDirection(ctx.ascDesc()) : OrderDirection.ASC;
        NullsOrderType nullsOrderType = generateNullsOrderType(ctx.nullsOrder());
        ASTNode expr = visit(ctx.aExpr());
        if (expr instanceof ColumnSegment) {
            ColumnSegment column = (ColumnSegment) expr;
            return new ColumnOrderByItemSegment(column, orderDirection, nullsOrderType);
        }
        if (expr instanceof LiteralExpressionSegment) {
            LiteralExpressionSegment index = (LiteralExpressionSegment) expr;
            return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(), Integer.parseInt(index.getLiterals().toString()), orderDirection, nullsOrderType);
        }
        if (expr instanceof ExpressionSegment) {
            return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()), orderDirection, nullsOrderType, (ExpressionSegment) expr);
        }
        return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()), orderDirection, nullsOrderType);
    }

    // NULLS FIRST/LAST; null when the clause is absent.
    private NullsOrderType generateNullsOrderType(final NullsOrderContext ctx) {
        if (null == ctx) {
            return null;
        }
        return null == ctx.FIRST() ? NullsOrderType.LAST : NullsOrderType.FIRST;
    }

    // ASC/DESC keyword to direction.
    private OrderDirection generateOrderDirection(final AscDescContext ctx) {
        return null == ctx.DESC() ? OrderDirection.ASC : OrderDirection.DESC;
    }

    // Data type with optional length/precision suffix.
    @Override
    public final ASTNode visitDataType(final DataTypeContext ctx) {
        DataTypeSegment result = new DataTypeSegment();
        result.setDataTypeName(((KeywordValue) visit(ctx.dataTypeName())).getValue());
        result.setStartIndex(ctx.start.getStartIndex());
        result.setStopIndex(ctx.stop.getStopIndex());
        if (null != ctx.dataTypeLength()) {
            DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.dataTypeLength());
            result.setDataLength(dataTypeLengthSegment);
        }
        return result;
    }

    // Type length "(p)" or "(p, s)".
    @Override
    public final ASTNode visitDataTypeLength(final DataTypeLengthContext ctx) {
        DataTypeLengthSegment result = new DataTypeLengthSegment();
        result.setStartIndex(ctx.start.getStartIndex());
        // NOTE(review): stop index is taken from ctx.stop.getStartIndex(), not getStopIndex() — preserved as-is; confirm intended.
        result.setStopIndex(ctx.stop.getStartIndex());
        List<TerminalNode> numbers = ctx.NUMBER_();
        if (1 == numbers.size()) {
            result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
        }
        if (2 == numbers.size()) {
            result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
            result.setScale(Integer.parseInt(numbers.get(1).getText()));
        }
        return result;
    }

    // INSERT statement: rest clause first, then target table, ON CONFLICT and RETURNING.
    @Override
    public ASTNode visitInsert(final InsertContext ctx) {
        PostgreSQLInsertStatement result = (PostgreSQLInsertStatement) visit(ctx.insertRest());
        result.setTable((SimpleTableSegment) visit(ctx.insertTarget()));
        if (null != ctx.optOnConflict()) {
            result.setOnDuplicateKeyColumnsSegment((OnDuplicateKeyColumnsSegment) visit(ctx.optOnConflict()));
        }
        if (null != ctx.returningClause()) {
            result.setReturningSegment((ReturningSegment) visit(ctx.returningClause()));
        }
        result.getParameterMarkerSegments().addAll(getParameterMarkerSegments());
        return result;
    }

    // ON CONFLICT ... DO UPDATE SET assignments.
    @Override
    public ASTNode visitOptOnConflict(final OptOnConflictContext ctx) {
        SetClauseListContext setClauseListContext = ctx.setClauseList();
        Collection<AssignmentSegment> assignments = ((SetAssignmentSegment) visit(setClauseListContext)).getAssignments();
        return new OnDuplicateKeyColumnsSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), assignments);
    }

    // INSERT target table with optional AS alias.
    @Override
    public ASTNode visitInsertTarget(final InsertTargetContext ctx) {
        SimpleTableSegment result = (SimpleTableSegment) visit(ctx.qualifiedName());
        if (null != ctx.AS()) {
            ColIdContext colId = ctx.colId();
            result.setAlias(new AliasSegment(colId.start.getStartIndex(), colId.stop.getStopIndex(), new IdentifierValue(colId.getText())));
        }
        return result;
    }

    // Left-recursive qualified-name list flattened into a collection.
    @Override
    public ASTNode visitQualifiedNameList(final QualifiedNameListContext ctx) {
        CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
        if (null != ctx.qualifiedName()) {
            result.getValue().add((SimpleTableSegment) visit(ctx.qualifiedName()));
        }
        if (null != ctx.qualifiedNameList()) {
            result.combine((CollectionValue) visit(ctx.qualifiedNameList()));
        }
        return result;
    }

    // Qualified table name; a double indirection yields a nested owner chain (catalog.schema.table).
    @Override
    public ASTNode visitQualifiedName(final QualifiedNameContext ctx) {
        if (null != ctx.indirection()) {
            AttrNameContext attrName = ctx.indirection().indirectionEl().attrName();
            TableNameSegment tableName = new TableNameSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText()));
            OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()));
            SimpleTableSegment result = new SimpleTableSegment(tableName);
            if (null != ctx.indirection().indirection()) {
                OwnerSegment tableOwner = createTableOwner(ctx.indirection().indirection());
                tableOwner.setOwner(owner);
                result.setOwner(tableOwner);
            } else {
                result.setOwner(owner);
            }
            return result;
        }
        return new SimpleTableSegment(new TableNameSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())));
    }

    // INSERT body: columns, VALUES or SELECT source (continues beyond this view).
    @Override
    public ASTNode visitInsertRest(final InsertRestContext ctx) {
        PostgreSQLInsertStatement result = new PostgreSQLInsertStatement();
        if (null == ctx.insertColumnList()) {
            result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex()
- 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } else { InsertColumnListContext insertColumns = ctx.insertColumnList(); CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(insertColumns); InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(insertColumns.start.getStartIndex() - 1, insertColumns.stop.getStopIndex() + 1, columns.getValue()); result.setInsertColumns(insertColumnsSegment); } ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); if (null == valuesClause) { PostgreSQLSelectStatement selectStatement = (PostgreSQLSelectStatement) visit(ctx.select()); result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); } else { result.getValues().addAll(createInsertValuesSegments(valuesClause)); } return result; } @Override public ASTNode visitInsertColumnList(final InsertColumnListContext ctx) { CollectionValue<ColumnSegment> result = new CollectionValue<>(); if (null != ctx.insertColumnList()) { result.getValue().addAll(((CollectionValue<ColumnSegment>) visit(ctx.insertColumnList())).getValue()); } result.getValue().add((ColumnSegment) visit(ctx.insertColumnItem())); return result; } @Override public ASTNode visitInsertColumnItem(final InsertColumnItemContext ctx) { if (null == ctx.optIndirection().indirectionEl()) { return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); } ColumnSegment result = new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.optIndirection().stop.getStopIndex(), new IdentifierValue(ctx.optIndirection().indirectionEl().attrName().getText())); result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } private Collection<InsertValuesSegment> 
createInsertValuesSegments(final ValuesClauseContext ctx) { Collection<InsertValuesSegment> result = new LinkedList<>(); if (null != ctx.valuesClause()) { Collection<InsertValuesSegment> expressions = createInsertValuesSegments(ctx.valuesClause()); result.addAll(expressions); } Collection<ExpressionSegment> expressions = createInsertValuesSegments(ctx.exprList()); InsertValuesSegment insertValuesSegment = new InsertValuesSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), (List<ExpressionSegment>) expressions); result.add(insertValuesSegment); return result; } private Collection<ExpressionSegment> createInsertValuesSegments(final ExprListContext ctx) { Collection<ExpressionSegment> result = new LinkedList<>(); if (null != ctx.exprList()) { Collection<ExpressionSegment> tmpResult = createInsertValuesSegments(ctx.exprList()); result.addAll(tmpResult); } ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); result.add(expr); return result; } private Collection<AssignmentSegment> generateAssignmentSegments(final SetClauseListContext ctx) { Collection<AssignmentSegment> result = new LinkedList<>(); if (null != ctx.setClauseList()) { Collection<AssignmentSegment> tmpResult = generateAssignmentSegments(ctx.setClauseList()); result.addAll(tmpResult); } AssignmentSegment assignmentSegment = (AssignmentSegment) visit(ctx.setClause()); result.add(assignmentSegment); return result; } @Override public ASTNode visitSetClause(final SetClauseContext ctx) { ColumnSegment columnSegment = (ColumnSegment) visit(ctx.setTarget()); List<ColumnSegment> columnSegments = new LinkedList<>(); columnSegments.add(columnSegment); ExpressionSegment expressionSegment = (ExpressionSegment) visit(ctx.aExpr()); return new ColumnAssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), columnSegments, expressionSegment); } @Override public ASTNode visitSetTarget(final SetTargetContext ctx) { IdentifierValue identifierValue = new 
IdentifierValue(ctx.colId().getText()); return new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), identifierValue); } @Override public ASTNode visitRelationExprOptAlias(final RelationExprOptAliasContext ctx) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName()); if (null != ctx.colId()) { result.setAlias(new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); } return result; } @Override public ASTNode visitUpdate(final UpdateContext ctx) { PostgreSQLUpdateStatement result = new PostgreSQLUpdateStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); result.setSetAssignment((SetAssignmentSegment) visit(ctx.setClauseList())); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitSetClauseList(final SetClauseListContext ctx) { Collection<AssignmentSegment> assignments = generateAssignmentSegments(ctx); return new SetAssignmentSegment(ctx.start.getStartIndex() - 4, ctx.stop.getStopIndex(), assignments); } @Override public ASTNode visitDelete(final DeleteContext ctx) { PostgreSQLDeleteStatement result = new PostgreSQLDeleteStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitWhereOrCurrentClause(final WhereOrCurrentClauseContext ctx) { return visit(ctx.whereClause()); } @Override public ASTNode visitSelect(final SelectContext ctx) { PostgreSQLSelectStatement result = 
(PostgreSQLSelectStatement) visit(ctx.selectNoParens()); result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitSelectNoParens(final SelectNoParensContext ctx) { PostgreSQLSelectStatement result = (PostgreSQLSelectStatement) visit(ctx.selectClauseN()); if (null != ctx.sortClause()) { OrderBySegment orderBySegment = (OrderBySegment) visit(ctx.sortClause()); result.setOrderBy(orderBySegment); } if (null != ctx.selectLimit()) { LimitSegment limitSegment = (LimitSegment) visit(ctx.selectLimit()); result.setLimit(limitSegment); } if (null != ctx.forLockingClause()) { LockSegment lockSegment = (LockSegment) visit(ctx.forLockingClause()); result.setLock(lockSegment); } return result; } @Override public ASTNode visitForLockingClause(final ForLockingClauseContext ctx) { return new LockSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); } @Override public ASTNode visitSelectWithParens(final SelectWithParensContext ctx) { if (null != ctx.selectWithParens()) { return visit(ctx.selectWithParens()); } return visit(ctx.selectNoParens()); } @Override public ASTNode visitSelectClauseN(final SelectClauseNContext ctx) { if (null != ctx.simpleSelect()) { return visit(ctx.simpleSelect()); } if (null != ctx.selectClauseN() && !ctx.selectClauseN().isEmpty()) { PostgreSQLSelectStatement result = new PostgreSQLSelectStatement(); PostgreSQLSelectStatement left = (PostgreSQLSelectStatement) visit(ctx.selectClauseN(0)); result.setProjections(left.getProjections()); result.setFrom(left.getFrom()); CombineSegment combineSegment = new CombineSegment(((TerminalNode) ctx.getChild(1)).getSymbol().getStartIndex(), ctx.getStop().getStopIndex(), left, getCombineType(ctx), (PostgreSQLSelectStatement) visit(ctx.selectClauseN(1))); result.setCombine(combineSegment); return result; } return visit(ctx.selectWithParens()); } private CombineType getCombineType(final SelectClauseNContext ctx) { boolean isDistinct = null == 
ctx.allOrDistinct() || null != ctx.allOrDistinct().DISTINCT(); if (null != ctx.UNION()) { return isDistinct ? CombineType.UNION : CombineType.UNION_ALL; } if (null != ctx.INTERSECT()) { return isDistinct ? CombineType.INTERSECT : CombineType.INTERSECT_ALL; } return isDistinct ? CombineType.EXCEPT : CombineType.EXCEPT_ALL; } @Override public ASTNode visitSimpleSelect(final SimpleSelectContext ctx) { PostgreSQLSelectStatement result = new PostgreSQLSelectStatement(); if (null == ctx.targetList()) { result.setProjections(new ProjectionsSegment(-1, -1)); } else { ProjectionsSegment projects = (ProjectionsSegment) visit(ctx.targetList()); if (null != ctx.distinctClause()) { projects.setDistinctRow(true); } result.setProjections(projects); } if (null != ctx.fromClause()) { TableSegment tableSegment = (TableSegment) visit(ctx.fromClause()); result.setFrom(tableSegment); } if (null != ctx.whereClause()) { result.setWhere((WhereSegment) visit(ctx.whereClause())); } if (null != ctx.groupClause()) { result.setGroupBy((GroupBySegment) visit(ctx.groupClause())); } if (null != ctx.havingClause()) { result.setHaving((HavingSegment) visit(ctx.havingClause())); } if (null != ctx.windowClause()) { result.setWindow((WindowSegment) visit(ctx.windowClause())); } return result; } @Override public ASTNode visitHavingClause(final HavingClauseContext ctx) { ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr); } @Override public ASTNode visitWindowClause(final WindowClauseContext ctx) { return new WindowSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex()); } @Override public ASTNode visitGroupClause(final GroupClauseContext ctx) { Collection<OrderByItemSegment> items = new LinkedList<>(); for (GroupByItemContext each : ctx.groupByList().groupByItem()) { items.add((OrderByItemSegment) visit(each)); } return new GroupBySegment(ctx.start.getStartIndex(), 
ctx.stop.getStopIndex(), items); } @Override public ASTNode visitGroupByItem(final GroupByItemContext ctx) { if (null != ctx.aExpr()) { ASTNode astNode = visit(ctx.aExpr()); if (astNode instanceof ColumnSegment) { return new ColumnOrderByItemSegment((ColumnSegment) astNode, OrderDirection.ASC, null); } if (astNode instanceof LiteralExpressionSegment) { LiteralExpressionSegment index = (LiteralExpressionSegment) astNode; return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(), Integer.parseInt(index.getLiterals().toString()), OrderDirection.ASC, null); } return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null); } return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null); } @Override public ASTNode visitTargetList(final TargetListContext ctx) { ProjectionsSegment result = new ProjectionsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); if (null != ctx.targetList()) { ProjectionsSegment projections = (ProjectionsSegment) visit(ctx.targetList()); result.getProjections().addAll(projections.getProjections()); } ProjectionSegment projection = (ProjectionSegment) visit(ctx.targetEl()); result.getProjections().add(projection); return result; } @Override public ASTNode visitTargetEl(final TargetElContext ctx) { ProjectionSegment result = createProjectionSegment(ctx, ctx.aExpr()); if (null != ctx.identifier()) { ((AliasAvailable) result).setAlias(new AliasSegment(ctx.identifier().start.getStartIndex(), ctx.identifier().stop.getStopIndex(), new IdentifierValue(ctx.identifier().getText()))); } return result; } private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr) { if (null != ctx.ASTERISK_()) { return new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); } if (null != ctx.DOT_ASTERISK_()) { 
ShorthandProjectionSegment result = new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } if (null != ctx.aExpr()) { ASTNode projection = visit(ctx.aExpr()); return createProjectionSegment(ctx, expr, projection); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null); } private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr, final ASTNode projection) { if (projection instanceof ColumnSegment) { return new ColumnProjectionSegment((ColumnSegment) projection); } if (projection instanceof AggregationProjectionSegment) { return (AggregationProjectionSegment) projection; } if (projection instanceof SubqueryExpressionSegment) { SubqueryExpressionSegment subqueryExpression = (SubqueryExpressionSegment) projection; String text = ctx.start.getInputStream().getText(new Interval(subqueryExpression.getStartIndex(), subqueryExpression.getStopIndex())); return new SubqueryProjectionSegment(subqueryExpression.getSubquery(), text); } if (projection instanceof ExistsSubqueryExpression) { ExistsSubqueryExpression existsSubqueryExpression = (ExistsSubqueryExpression) projection; String text = ctx.start.getInputStream().getText(new Interval(existsSubqueryExpression.getStartIndex(), existsSubqueryExpression.getStopIndex())); return new SubqueryProjectionSegment(existsSubqueryExpression.getSubquery(), text); } if (projection instanceof ExpressionSegment) { return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), (ExpressionSegment) projection); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null); } @Override public ASTNode visitFromClause(final FromClauseContext 
ctx) { return visit(ctx.fromList()); } @Override public ASTNode visitFromList(final FromListContext ctx) { if (null != ctx.fromList()) { JoinTableSegment result = new JoinTableSegment(); result.setStartIndex(ctx.start.getStartIndex()); result.setStopIndex(ctx.stop.getStopIndex()); result.setLeft((TableSegment) visit(ctx.fromList())); result.setRight((TableSegment) visit(ctx.tableReference())); result.setJoinType(JoinType.COMMA.name()); return result; } return visit(ctx.tableReference()); } @Override public ASTNode visitTableReference(final TableReferenceContext ctx) { if (null != ctx.relationExpr()) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName()); if (null != ctx.aliasClause()) { result.setAlias((AliasSegment) visit(ctx.aliasClause())); } return result; } if (null != ctx.selectWithParens()) { PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens()); SubquerySegment subquery = new SubquerySegment(ctx.selectWithParens().start.getStartIndex(), ctx.selectWithParens().stop.getStopIndex(), select); AliasSegment alias = null != ctx.aliasClause() ? (AliasSegment) visit(ctx.aliasClause()) : null; SubqueryTableSegment result = new SubqueryTableSegment(subquery); result.setAlias(alias); return result; } if (null == ctx.tableReference()) { TableNameSegment tableName = new TableNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue("not support")); return new SimpleTableSegment(tableName); } JoinTableSegment result = new JoinTableSegment(); result.setLeft((TableSegment) visit(ctx.tableReference())); int startIndex = null != ctx.LP_() ? ctx.LP_().getSymbol().getStartIndex() : ctx.tableReference().start.getStartIndex(); int stopIndex = 0; AliasSegment alias = null; if (null == ctx.aliasClause()) { stopIndex = null != ctx.RP_() ? 
ctx.RP_().getSymbol().getStopIndex() : ctx.tableReference().start.getStopIndex(); } else { alias = (AliasSegment) visit(ctx.aliasClause()); startIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.joinedTable().stop.getStopIndex(); } result.setStartIndex(startIndex); result.setStopIndex(stopIndex); result = visitJoinedTable(ctx.joinedTable(), result); result.setAlias(alias); return result; } private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final JoinTableSegment tableSegment) { TableSegment right = (TableSegment) visit(ctx.tableReference()); tableSegment.setRight(right); tableSegment.setJoinType(getJoinType(ctx)); tableSegment.setNatural(null != ctx.naturalJoinType()); return null != ctx.joinQual() ? visitJoinQual(ctx.joinQual(), tableSegment) : tableSegment; } private String getJoinType(final JoinedTableContext ctx) { if (null != ctx.crossJoinType()) { return JoinType.CROSS.name(); } if (null != ctx.innerJoinType()) { return JoinType.INNER.name(); } if (null != ctx.outerJoinType()) { return getOutJoinType(ctx.outerJoinType()); } if (null != ctx.naturalJoinType()) { return getNaturalJoinType(ctx.naturalJoinType()); } return JoinType.COMMA.name(); } private static String getNaturalJoinType(final NaturalJoinTypeContext ctx) { if (null != ctx.INNER()) { return JoinType.INNER.name(); } if (null != ctx.FULL()) { return JoinType.FULL.name(); } if (null != ctx.LEFT()) { return JoinType.LEFT.name(); } if (null != ctx.RIGHT()) { return JoinType.RIGHT.name(); } return JoinType.INNER.name(); } private static String getOutJoinType(final OuterJoinTypeContext ctx) { if (null == ctx.FULL()) { return null != ctx.LEFT() ? 
JoinType.LEFT.name() : JoinType.RIGHT.name(); } return JoinType.FULL.name(); } private JoinTableSegment visitJoinQual(final JoinQualContext ctx, final JoinTableSegment joinTableSource) { if (null != ctx.aExpr()) { ExpressionSegment condition = (ExpressionSegment) visit(ctx.aExpr()); joinTableSource.setCondition(condition); } if (null != ctx.USING()) { joinTableSource.setUsing(generateUsingColumn(ctx.nameList())); } return joinTableSource; } private List<ColumnSegment> generateUsingColumn(final NameListContext ctx) { List<ColumnSegment> result = new ArrayList<>(); if (null != ctx.nameList()) { result.addAll(generateUsingColumn(ctx.nameList())); } if (null != ctx.name()) { result.add(new ColumnSegment(ctx.name().start.getStartIndex(), ctx.name().stop.getStopIndex(), new IdentifierValue(ctx.name().getText()))); } return result; } @Override public ASTNode visitAliasClause(final AliasClauseContext ctx) { StringBuilder aliasName = new StringBuilder(ctx.colId().getText()); if (null != ctx.nameList()) { aliasName.append(ctx.LP_().getText()); aliasName.append(ctx.nameList().getText()); aliasName.append(ctx.RP_().getText()); } return new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(aliasName.toString())); } private OwnerSegment createTableOwner(final IndirectionContext ctx) { AttrNameContext attrName = ctx.indirectionEl().attrName(); return new OwnerSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText())); } @Override public ASTNode visitWhereClause(final WhereClauseContext ctx) { ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr); } @Override public ASTNode visitSelectLimit(final SelectLimitContext ctx) { if (null != ctx.limitClause() && null != ctx.offsetClause()) { return createLimitSegmentWhenLimitAndOffset(ctx); } return 
createLimitSegmentWhenRowCountOrOffsetAbsent(ctx); } @Override public ASTNode visitSelectLimitValue(final SelectLimitValueContext ctx) { if (null != ctx.ALL()) { return null; } ASTNode astNode = visit(ctx.aExpr()); if (astNode instanceof ParameterMarkerExpressionSegment) { return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex()); } return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString())); } @Override public ASTNode visitSelectOffsetValue(final SelectOffsetValueContext ctx) { ASTNode astNode = visit(ctx.aExpr()); if (astNode instanceof ParameterMarkerExpressionSegment) { return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex()); } return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString())); } @Override public ASTNode visitSelectFetchFirstValue(final SelectFetchFirstValueContext ctx) { ASTNode astNode = visit(ctx.cExpr()); if (null != astNode) { if (astNode instanceof ParameterMarkerLimitValueSegment) { return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex()); } return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString())); } return visit(ctx.NUMBER_()); } private LimitSegment createLimitSegmentWhenLimitAndOffset(final SelectLimitContext ctx) { ParseTree astNode0 = ctx.getChild(0); LimitValueSegment rowCount = null; LimitValueSegment offset = null; if (astNode0 instanceof 
LimitClauseContext) { rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue()); } else { offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue()); } ParseTree astNode1 = ctx.getChild(1); if (astNode1 instanceof LimitClauseContext) { rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue()); } else { offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue()); } return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, rowCount); } private LimitSegment createLimitSegmentWhenRowCountOrOffsetAbsent(final SelectLimitContext ctx) { if (null != ctx.limitClause()) { if (null != ctx.limitClause().selectFetchFirstValue()) { LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectFetchFirstValue()); return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit); } LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectLimitValue()); return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit); } LimitValueSegment offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue()); return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, null); } @Override public ASTNode visitExecuteStmt(final ExecuteStmtContext ctx) { return new PostgreSQLExecuteStatement(); } /** * Get original text. 
* * @param ctx context * @return original text */ protected String getOriginalText(final ParserRuleContext ctx) { return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); } @Override @SuppressWarnings("unchecked") public ASTNode visitAnyName(final AnyNameContext ctx) { CollectionValue<NameSegment> result = new CollectionValue<>(); if (null != ctx.attrs()) { result.combine((CollectionValue<NameSegment>) visit(ctx.attrs())); } result.getValue().add(new NameSegment(ctx.colId().getStart().getStartIndex(), ctx.colId().getStop().getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } @Override @SuppressWarnings("unchecked") public ASTNode visitAttrs(final AttrsContext ctx) { CollectionValue<NameSegment> result = new CollectionValue<>(); result.getValue().add(new NameSegment(ctx.attrName().getStart().getStartIndex(), ctx.attrName().getStop().getStopIndex(), new IdentifierValue(ctx.attrName().getText()))); if (null != ctx.attrs()) { result.combine((CollectionValue<NameSegment>) visit(ctx.attrs())); } return result; } @Override public ASTNode visitName(final NameContext ctx) { return visit(ctx.identifier()); } @Override public ASTNode visitSignedIconst(final SignedIconstContext ctx) { return new NumberLiteralValue(ctx.getText()); } }
do we need to validate whether exception is CosmosException?
public Mono<ShouldRetryResult> shouldRetry(Exception e) { if (this.request == null) { logger.error("onBeforeSendRequest has not been invoked with the MetadataRequestRetryPolicy..."); return Mono.just(ShouldRetryResult.error(e)); } CosmosException cosmosException = Utils.as(e, CosmosException.class); if (shouldMarkRegionAsUnavailable(cosmosException)) { URI locationEndpointToRoute = request.requestContext.locationEndpointToRoute; if (request.isReadOnlyRequest()) { this.globalEndpointManager.markEndpointUnavailableForRead(locationEndpointToRoute); } else { this.globalEndpointManager.markEndpointUnavailableForWrite(locationEndpointToRoute); } } return Mono.just(ShouldRetryResult.error(cosmosException)); }
CosmosException cosmosException = Utils.as(e, CosmosException.class);
public Mono<ShouldRetryResult> shouldRetry(Exception e) { return webExceptionRetryPolicy.shouldRetry(e).flatMap(shouldRetryResult -> { if (!shouldRetryResult.shouldRetry) { if (this.request == null || this.webExceptionRetryPolicy == null) { logger.error("onBeforeSendRequest has not been invoked with the MetadataRequestRetryPolicy..."); return Mono.just(ShouldRetryResult.error(e)); } if (!(e instanceof CosmosException)) { logger.debug("exception is not an instance of CosmosException..."); return Mono.just(ShouldRetryResult.error(e)); } CosmosException cosmosException = Utils.as(e, CosmosException.class); if (shouldMarkRegionAsUnavailable(cosmosException)) { URI locationEndpointToRoute = request.requestContext.locationEndpointToRoute; if (request.isReadOnlyRequest()) { this.globalEndpointManager.markEndpointUnavailableForRead(locationEndpointToRoute); } else { this.globalEndpointManager.markEndpointUnavailableForWrite(locationEndpointToRoute); } } return Mono.just(ShouldRetryResult.error(cosmosException)); } return Mono.just(shouldRetryResult); }); }
class MetadataRequestRetryPolicy implements IRetryPolicy { private final static Logger logger = LoggerFactory.getLogger(MetadataRequestRetryPolicy.class); private final GlobalEndpointManager globalEndpointManager; private RxDocumentServiceRequest request; public MetadataRequestRetryPolicy(GlobalEndpointManager globalEndpointManager) { this.globalEndpointManager = globalEndpointManager; } public void onBeforeSendRequest(RxDocumentServiceRequest request) { this.request = request; } private static boolean shouldMarkRegionAsUnavailable(CosmosException exception) { if (WebExceptionUtility.isNetworkFailure(exception)) { return Exceptions.isSubStatusCode(exception, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE) || Exceptions.isSubStatusCode(exception, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT); } return false; } @Override @Override public RetryContext getRetryContext() { return null; } }
class MetadataRequestRetryPolicy implements IRetryPolicy { private final static Logger logger = LoggerFactory.getLogger(MetadataRequestRetryPolicy.class); private final GlobalEndpointManager globalEndpointManager; private RxDocumentServiceRequest request; private WebExceptionRetryPolicy webExceptionRetryPolicy; public MetadataRequestRetryPolicy(GlobalEndpointManager globalEndpointManager) { this.globalEndpointManager = globalEndpointManager; } public void onBeforeSendRequest(RxDocumentServiceRequest request) { this.request = request; this.webExceptionRetryPolicy = new WebExceptionRetryPolicy(BridgeInternal.getRetryContext(request.requestContext.cosmosDiagnostics)); } private boolean shouldMarkRegionAsUnavailable(CosmosException exception) { if (!(request.isAddressRefresh() || request.isMetadataRequest())) { return false; } if (WebExceptionUtility.isNetworkFailure(exception)) { return Exceptions.isSubStatusCode(exception, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE); } return false; } @Override @Override public RetryContext getRetryContext() { return null; } }
That's what that method returns.
/**
 * Returns the parser rule context that follows a variable name, determined by the
 * context that encloses it (parameter list, binding pattern, statement, etc.).
 * Throws {@link IllegalStateException} for any enclosing context it does not recognize.
 */
private ParserRuleContext getNextRuleForVarName() {
    ParserRuleContext parent = getParentContext();

    if (parent == ParserRuleContext.REQUIRED_PARAM || parent == ParserRuleContext.PARAM_LIST) {
        return ParserRuleContext.REQUIRED_PARAM_NAME_RHS;
    }
    if (parent == ParserRuleContext.DEFAULTABLE_PARAM) {
        return ParserRuleContext.ASSIGN_OP;
    }
    if (parent == ParserRuleContext.REST_PARAM) {
        return ParserRuleContext.PARAM_END;
    }
    if (parent == ParserRuleContext.FOREACH_STMT) {
        return ParserRuleContext.IN_KEYWORD;
    }
    // All binding-pattern contexts share the same continuation.
    if (parent == ParserRuleContext.TYPED_BINDING_PATTERN
            || parent == ParserRuleContext.CAPTURE_BINDING_PATTERN
            || parent == ParserRuleContext.LIST_BINDING_PATTERN
            || parent == ParserRuleContext.REST_BINDING_PATTERN
            || parent == ParserRuleContext.FIELD_BINDING_PATTERN
            || parent == ParserRuleContext.MAPPING_BINDING_PATTERN) {
        return getNextRuleForTypedBindingPattern();
    }
    if (isStatement(parent) || parent == ParserRuleContext.LISTENER_DECL
            || parent == ParserRuleContext.CONSTANT_DECL) {
        return ParserRuleContext.VAR_DECL_STMT_RHS;
    }
    if (parent == ParserRuleContext.RECORD_FIELD) {
        return ParserRuleContext.FIELD_DESCRIPTOR_RHS;
    }
    if (parent == ParserRuleContext.ARG_LIST) {
        return ParserRuleContext.NAMED_OR_POSITIONAL_ARG_RHS;
    }
    if (parent == ParserRuleContext.OBJECT_MEMBER) {
        return ParserRuleContext.OBJECT_FIELD_RHS;
    }
    if (parent == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) {
        return ParserRuleContext.CLOSE_BRACKET;
    }
    if (parent == ParserRuleContext.KEY_SPECIFIER) {
        return ParserRuleContext.TABLE_KEY_RHS;
    }
    if (parent == ParserRuleContext.LET_EXPR_LET_VAR_DECL
            || parent == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
        return ParserRuleContext.ASSIGN_OP;
    }
    if (parent == ParserRuleContext.ANNOTATION_DECL) {
        return ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS;
    }
    if (parent == ParserRuleContext.QUERY_EXPRESSION) {
        return ParserRuleContext.IN_KEYWORD;
    }
    throw new IllegalStateException(parent.toString());
}
return getNextRuleForTypedBindingPattern();
/**
 * Returns the parser rule context that follows a variable name, determined by the
 * context that encloses it (parameter list, binding pattern, statement, etc.).
 * Throws {@link IllegalStateException} for any enclosing context it does not recognize.
 */
private ParserRuleContext getNextRuleForVarName() {
    ParserRuleContext parent = getParentContext();

    if (parent == ParserRuleContext.REQUIRED_PARAM || parent == ParserRuleContext.PARAM_LIST) {
        return ParserRuleContext.REQUIRED_PARAM_NAME_RHS;
    }
    if (parent == ParserRuleContext.DEFAULTABLE_PARAM) {
        return ParserRuleContext.ASSIGN_OP;
    }
    if (parent == ParserRuleContext.REST_PARAM) {
        return ParserRuleContext.PARAM_END;
    }
    if (parent == ParserRuleContext.FOREACH_STMT) {
        return ParserRuleContext.IN_KEYWORD;
    }
    // All binding-pattern contexts (including a list binding pattern that may still
    // turn out to be a tuple-type member) share the same continuation.
    if (parent == ParserRuleContext.TYPED_BINDING_PATTERN
            || parent == ParserRuleContext.CAPTURE_BINDING_PATTERN
            || parent == ParserRuleContext.LIST_BINDING_PATTERN
            || parent == ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_MEMBER
            || parent == ParserRuleContext.REST_BINDING_PATTERN
            || parent == ParserRuleContext.FIELD_BINDING_PATTERN
            || parent == ParserRuleContext.MAPPING_BINDING_PATTERN) {
        return getNextRuleForTypedBindingPattern();
    }
    if (isStatement(parent) || parent == ParserRuleContext.LISTENER_DECL
            || parent == ParserRuleContext.CONSTANT_DECL) {
        return ParserRuleContext.VAR_DECL_STMT_RHS;
    }
    if (parent == ParserRuleContext.RECORD_FIELD) {
        return ParserRuleContext.FIELD_DESCRIPTOR_RHS;
    }
    if (parent == ParserRuleContext.ARG_LIST) {
        return ParserRuleContext.NAMED_OR_POSITIONAL_ARG_RHS;
    }
    if (parent == ParserRuleContext.OBJECT_MEMBER) {
        return ParserRuleContext.OBJECT_FIELD_RHS;
    }
    if (parent == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) {
        return ParserRuleContext.CLOSE_BRACKET;
    }
    if (parent == ParserRuleContext.KEY_SPECIFIER) {
        return ParserRuleContext.TABLE_KEY_RHS;
    }
    if (parent == ParserRuleContext.LET_EXPR_LET_VAR_DECL
            || parent == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
        return ParserRuleContext.ASSIGN_OP;
    }
    if (parent == ParserRuleContext.ANNOTATION_DECL) {
        return ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS;
    }
    if (parent == ParserRuleContext.QUERY_EXPRESSION) {
        return ParserRuleContext.IN_KEYWORD;
    }
    throw new IllegalStateException(parent.toString());
}
class BallerinaParserErrorHandler extends AbstractParserErrorHandler { /** * FUNC_DEF_OR_FUNC_TYPE --> When a func-def and func-type-desc are possible. * e.g: start of a module level construct that starts with 'function' keyword. */ private static final ParserRuleContext[] FUNC_TYPE_OR_DEF_OPTIONAL_RETURNS = { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.FUNC_BODY_OR_TYPE_DESC_RHS }; private static final ParserRuleContext[] FUNC_BODY_OR_TYPE_DESC_RHS = { ParserRuleContext.FUNC_BODY, ParserRuleContext.AMBIGUOUS_FUNC_TYPE_DESC_RHS }; /** * FUNC_DEF --> When only function definitions are possible. eg: resource function. */ private static final ParserRuleContext[] FUNC_DEF_OPTIONAL_RETURNS = { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.FUNC_BODY }; private static final ParserRuleContext[] FUNC_BODY = { ParserRuleContext.FUNC_BODY_BLOCK, ParserRuleContext.EXTERNAL_FUNC_BODY }; private static final ParserRuleContext[] OBJECT_FUNC_BODY = { ParserRuleContext.SEMICOLON, ParserRuleContext.EXTERNAL_FUNC_BODY }; /** * ANNON_FUNC--> When a anonymous function is possible. */ private static final ParserRuleContext[] ANNON_FUNC_OPTIONAL_RETURNS = { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.ANON_FUNC_BODY }; private static final ParserRuleContext[] ANON_FUNC_BODY = { ParserRuleContext.FUNC_BODY_BLOCK, ParserRuleContext.EXPLICIT_ANON_FUNC_EXPR_BODY_START }; /** * FUNC_TYPE --> When a only function type is possible. 
*/ private static final ParserRuleContext[] FUNC_TYPE_OPTIONAL_RETURNS = { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.FUNC_TYPE_DESC_END }; private static final ParserRuleContext[] WORKER_NAME_RHS = { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.BLOCK_STMT }; private static final ParserRuleContext[] STATEMENTS = { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.ASSIGNMENT_STMT, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.IF_BLOCK, ParserRuleContext.WHILE_BLOCK, ParserRuleContext.CALL_STMT, ParserRuleContext.PANIC_STMT, ParserRuleContext.CONTINUE_STATEMENT, ParserRuleContext.BREAK_STATEMENT, ParserRuleContext.RETURN_STMT, ParserRuleContext.COMPOUND_ASSIGNMENT_STMT, ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT, ParserRuleContext.EXPRESSION_STATEMENT, ParserRuleContext.LOCK_STMT, ParserRuleContext.BLOCK_STMT, ParserRuleContext.NAMED_WORKER_DECL, ParserRuleContext.FORK_STMT, ParserRuleContext.FOREACH_STMT, ParserRuleContext.XML_NAMESPACE_DECLARATION, ParserRuleContext.TRANSACTION_STMT, ParserRuleContext.RETRY_STMT, ParserRuleContext.ROLLBACK_STMT }; private static final ParserRuleContext[] VAR_DECL_RHS = { ParserRuleContext.ASSIGN_OP, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] TOP_LEVEL_NODE = { ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE, ParserRuleContext.MODULE_TYPE_DEFINITION, ParserRuleContext.IMPORT_DECL, ParserRuleContext.LISTENER_DECL, ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.SERVICE_DECL, ParserRuleContext.ANNOTATION_DECL, ParserRuleContext.XML_NAMESPACE_DECLARATION, ParserRuleContext.MODULE_ENUM_DECLARATION, ParserRuleContext.EOF }; private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_METADATA = new ParserRuleContext[] { ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE, ParserRuleContext.MODULE_TYPE_DEFINITION, 
ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL, ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.ANNOTATION_DECL, ParserRuleContext.XML_NAMESPACE_DECLARATION, ParserRuleContext.MODULE_ENUM_DECLARATION, ParserRuleContext.EOF }; private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_MODIFIER = { ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE, ParserRuleContext.MODULE_TYPE_DEFINITION, ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL, ParserRuleContext.CONSTANT_DECL, ParserRuleContext.ANNOTATION_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.XML_NAMESPACE_DECLARATION, ParserRuleContext.MODULE_ENUM_DECLARATION, ParserRuleContext.EOF }; private static final ParserRuleContext[] TYPE_OR_VAR_NAME = { ParserRuleContext.VARIABLE_NAME, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN }; private static final ParserRuleContext[] ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN = { ParserRuleContext.ASSIGN_OP, ParserRuleContext.VARIABLE_NAME }; private static final ParserRuleContext[] FIELD_DESCRIPTOR_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.QUESTION_MARK, ParserRuleContext.ASSIGN_OP }; private static final ParserRuleContext[] FIELD_OR_REST_DESCIPTOR_RHS = { ParserRuleContext.ELLIPSIS, ParserRuleContext.VARIABLE_NAME }; private static final ParserRuleContext[] RECORD_BODY_START = { ParserRuleContext.CLOSED_RECORD_BODY_START, ParserRuleContext.OPEN_BRACE }; private static final ParserRuleContext[] RECORD_BODY_END = { ParserRuleContext.CLOSED_RECORD_BODY_END, ParserRuleContext.CLOSE_BRACE }; private static final ParserRuleContext[] TYPE_DESCRIPTORS = { ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR, ParserRuleContext.OBJECT_TYPE_DESCRIPTOR, ParserRuleContext.RECORD_TYPE_DESCRIPTOR, ParserRuleContext.NIL_TYPE_DESCRIPTOR, ParserRuleContext.PARAMETERIZED_TYPE, ParserRuleContext.ERROR_KEYWORD, ParserRuleContext.STREAM_KEYWORD, 
ParserRuleContext.TABLE_KEYWORD, ParserRuleContext.FUNC_TYPE_DESC, ParserRuleContext.PARENTHESISED_TYPE_DESC_START, ParserRuleContext.CONSTANT_EXPRESSION, ParserRuleContext.TUPLE_TYPE_DESC_START }; private static final ParserRuleContext[] RECORD_FIELD_OR_RECORD_END = { ParserRuleContext.RECORD_BODY_END, ParserRuleContext.RECORD_FIELD }; private static final ParserRuleContext[] RECORD_FIELD_START = { ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD }; private static final ParserRuleContext[] RECORD_FIELD_WITHOUT_METADATA = { ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD }; private static final ParserRuleContext[] ARG_START_OR_ARG_LIST_END = { ParserRuleContext.ARG_LIST_END, ParserRuleContext.ARG_START }; private static final ParserRuleContext[] ARG_START = { ParserRuleContext.VARIABLE_NAME, ParserRuleContext.ELLIPSIS, ParserRuleContext.EXPRESSION }; private static final ParserRuleContext[] ARG_END = { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.COMMA }; private static final ParserRuleContext[] NAMED_OR_POSITIONAL_ARG_RHS = { ParserRuleContext.ARG_END, ParserRuleContext.ASSIGN_OP }; private static final ParserRuleContext[] OBJECT_FIELD_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.ASSIGN_OP }; private static final ParserRuleContext[] OBJECT_MEMBER_START = { ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK, ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE }; private static final ParserRuleContext[] OBJECT_MEMBER_WITHOUT_METADATA = { ParserRuleContext.ASTERISK, ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE }; private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD = { ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.PRIVATE_KEYWORD, ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY }; private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY = { 
ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER, ParserRuleContext.OBJECT_METHOD_START }; private static final ParserRuleContext[] OBJECT_METHOD_START = { ParserRuleContext.REMOTE_KEYWORD, ParserRuleContext.FUNCTION_KEYWORD }; private static final ParserRuleContext[] OBJECT_TYPE_DESCRIPTOR_START = { ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER, ParserRuleContext.OBJECT_KEYWORD }; private static final ParserRuleContext[] ELSE_BODY = { ParserRuleContext.IF_BLOCK, ParserRuleContext.OPEN_BRACE }; private static final ParserRuleContext[] ELSE_BLOCK = { ParserRuleContext.ELSE_KEYWORD, ParserRuleContext.STATEMENT }; private static final ParserRuleContext[] CALL_STATEMENT = { ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.VARIABLE_NAME }; private static final ParserRuleContext[] IMPORT_PREFIX_DECL = { ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] IMPORT_VERSION = { ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] IMPORT_DECL_RHS = { ParserRuleContext.SLASH, ParserRuleContext.DOT, ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] AFTER_IMPORT_MODULE_NAME = { ParserRuleContext.DOT, ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] MAJOR_MINOR_VERSION_END = { ParserRuleContext.DOT, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] RETURN_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.EXPRESSION }; private static final ParserRuleContext[] EXPRESSION_START = { ParserRuleContext.BASIC_LITERAL, ParserRuleContext.NIL_LITERAL, ParserRuleContext.VARIABLE_REF, ParserRuleContext.ACCESS_EXPRESSION, ParserRuleContext.TYPEOF_EXPRESSION, ParserRuleContext.TRAP_KEYWORD, ParserRuleContext.UNARY_EXPRESSION, 
ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.LIST_CONSTRUCTOR, ParserRuleContext.TYPE_CAST, ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION, ParserRuleContext.LET_EXPRESSION, ParserRuleContext.TEMPLATE_START, ParserRuleContext.XML_KEYWORD, ParserRuleContext.STRING_KEYWORD, ParserRuleContext.ANON_FUNC_EXPRESSION, ParserRuleContext.ERROR_KEYWORD, ParserRuleContext.NEW_KEYWORD, ParserRuleContext.START_KEYWORD, ParserRuleContext.FLUSH_KEYWORD, ParserRuleContext.LEFT_ARROW_TOKEN, ParserRuleContext.WAIT_KEYWORD, ParserRuleContext.COMMIT_KEYWORD, ParserRuleContext.TRANSACTIONAL_KEYWORD }; private static final ParserRuleContext[] FIRST_MAPPING_FIELD_START = { ParserRuleContext.MAPPING_FIELD, ParserRuleContext.CLOSE_BRACE }; private static final ParserRuleContext[] MAPPING_FIELD_START = { ParserRuleContext.MAPPING_FIELD_NAME, ParserRuleContext.STRING_LITERAL, ParserRuleContext.COMPUTED_FIELD_NAME, ParserRuleContext.ELLIPSIS }; private static final ParserRuleContext[] SPECIFIC_FIELD_RHS = { ParserRuleContext.COLON, ParserRuleContext.MAPPING_FIELD_END }; private static final ParserRuleContext[] MAPPING_FIELD_END = { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.COMMA }; private static final ParserRuleContext[] OPTIONAL_SERVICE_NAME = { ParserRuleContext.SERVICE_NAME, ParserRuleContext.ON_KEYWORD }; private static final ParserRuleContext[] RESOURCE_DEF_START = { ParserRuleContext.RESOURCE_KEYWORD, ParserRuleContext.FUNC_DEF, ParserRuleContext.CLOSE_BRACE }; private static final ParserRuleContext[] CONST_DECL_RHS = { ParserRuleContext.STATEMENT_START_IDENTIFIER, ParserRuleContext.ASSIGN_OP }; private static final ParserRuleContext[] ARRAY_LENGTH = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.DECIMAL_INTEGER_LITERAL, ParserRuleContext.HEX_INTEGER_LITERAL, ParserRuleContext.ASTERISK, ParserRuleContext.VARIABLE_REF }; private static final ParserRuleContext[] PARAM_LIST = { ParserRuleContext.CLOSE_PARENTHESIS, 
ParserRuleContext.REQUIRED_PARAM }; private static final ParserRuleContext[] PARAMETER_START = { ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESC_IN_PARAM }; private static final ParserRuleContext[] PARAMETER_WITHOUT_ANNOTS = { ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESC_IN_PARAM }; private static final ParserRuleContext[] REQUIRED_PARAM_NAME_RHS = { ParserRuleContext.PARAM_END, ParserRuleContext.ASSIGN_OP }; private static final ParserRuleContext[] PARAM_END = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS }; private static final ParserRuleContext[] STMT_START_WITH_EXPR_RHS = { ParserRuleContext.ASSIGN_OP, ParserRuleContext.RIGHT_ARROW, ParserRuleContext.COMPOUND_BINARY_OPERATOR, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] STMT_START_WITH_IDENTIFIER = { ParserRuleContext.ASSIGN_OP, ParserRuleContext.VARIABLE_NAME, ParserRuleContext.EXPRESSION_RHS, ParserRuleContext.VAR_DECL_STARTED_WITH_DENTIFIER }; private static final ParserRuleContext[] EXPRESSION_STATEMENT_START = { ParserRuleContext.VARIABLE_REF, ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.START_KEYWORD, ParserRuleContext.FLUSH_KEYWORD }; private static final ParserRuleContext[] ANNOT_DECL_OPTIONAL_TYPE = { ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER, ParserRuleContext.ANNOTATION_TAG }; private static final ParserRuleContext[] CONST_DECL_TYPE = { ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER, ParserRuleContext.VARIABLE_NAME }; private static final ParserRuleContext[] ANNOT_DECL_RHS = { ParserRuleContext.ANNOTATION_TAG, ParserRuleContext.ON_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] ANNOT_OPTIONAL_ATTACH_POINTS = { ParserRuleContext.ON_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] ATTACH_POINT = { ParserRuleContext.SOURCE_KEYWORD, ParserRuleContext.ATTACH_POINT_IDENT }; private static 
final ParserRuleContext[] ATTACH_POINT_IDENT = { ParserRuleContext.SINGLE_KEYWORD_ATTACH_POINT_IDENT, ParserRuleContext.OBJECT_IDENT, ParserRuleContext.RESOURCE_IDENT, ParserRuleContext.RECORD_IDENT }; private static final ParserRuleContext[] ATTACH_POINT_END = { ParserRuleContext.COMMA, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] XML_NAMESPACE_PREFIX_DECL = { ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] CONSTANT_EXPRESSION = { ParserRuleContext.BASIC_LITERAL, ParserRuleContext.VARIABLE_REF, ParserRuleContext.PLUS_TOKEN, ParserRuleContext.MINUS_TOKEN, ParserRuleContext.NIL_LITERAL }; private static final ParserRuleContext[] LIST_CONSTRUCTOR_RHS = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.EXPRESSION }; private static final ParserRuleContext[] TYPE_CAST_PARAM = { ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS, ParserRuleContext.ANNOTATIONS }; private static final ParserRuleContext[] TYPE_CAST_PARAM_RHS = { ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS, ParserRuleContext.GT }; private static final ParserRuleContext[] TABLE_KEYWORD_RHS = { ParserRuleContext.KEY_SPECIFIER, ParserRuleContext.TABLE_CONSTRUCTOR }; private static final ParserRuleContext[] ROW_LIST_RHS = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.MAPPING_CONSTRUCTOR }; private static final ParserRuleContext[] TABLE_ROW_END = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET }; private static final ParserRuleContext[] KEY_SPECIFIER_RHS = { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.VARIABLE_NAME }; private static final ParserRuleContext[] TABLE_KEY_RHS = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS }; private static final ParserRuleContext[] ERROR_TYPE_PARAMS = { ParserRuleContext.INFERRED_TYPE_DESC, ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS }; private static final ParserRuleContext[] LET_VAR_DECL_START = { ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, 
ParserRuleContext.ANNOTATIONS }; private static final ParserRuleContext[] STREAM_TYPE_FIRST_PARAM_RHS = { ParserRuleContext.COMMA, ParserRuleContext.GT }; private static final ParserRuleContext[] TEMPLATE_MEMBER = { ParserRuleContext.TEMPLATE_STRING, ParserRuleContext.INTERPOLATION_START_TOKEN, ParserRuleContext.TEMPLATE_END }; private static final ParserRuleContext[] TEMPLATE_STRING_RHS = { ParserRuleContext.INTERPOLATION_START_TOKEN, ParserRuleContext.TEMPLATE_END }; private static final ParserRuleContext[] KEY_CONSTRAINTS_RHS = { ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.LT }; private static final ParserRuleContext[] FUNCTION_KEYWORD_RHS = { ParserRuleContext.FUNC_NAME, ParserRuleContext.OPEN_PARENTHESIS }; private static final ParserRuleContext[] TYPEDESC_RHS = { ParserRuleContext.END_OF_TYPE_DESC, ParserRuleContext.ARRAY_TYPE_DESCRIPTOR, ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR, ParserRuleContext.PIPE, ParserRuleContext.BITWISE_AND_OPERATOR }; private static final ParserRuleContext[] TABLE_TYPE_DESC_RHS = { ParserRuleContext.KEY_KEYWORD, ParserRuleContext.TYPEDESC_RHS }; private static final ParserRuleContext[] NEW_KEYWORD_RHS = { ParserRuleContext.TYPE_DESC_IN_NEW_EXPR, ParserRuleContext.EXPRESSION_RHS }; private static final ParserRuleContext[] TABLE_CONSTRUCTOR_OR_QUERY_START = { ParserRuleContext.TABLE_KEYWORD, ParserRuleContext.STREAM_KEYWORD, ParserRuleContext.QUERY_EXPRESSION }; private static final ParserRuleContext[] TABLE_CONSTRUCTOR_OR_QUERY_RHS = { ParserRuleContext.TABLE_CONSTRUCTOR, ParserRuleContext.QUERY_EXPRESSION }; private static final ParserRuleContext[] QUERY_EXPRESSION_RHS = { ParserRuleContext.SELECT_CLAUSE, ParserRuleContext.WHERE_CLAUSE, ParserRuleContext.FROM_CLAUSE, ParserRuleContext.LET_CLAUSE, ParserRuleContext.DO_CLAUSE, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS = { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.COMMA }; private static final 
ParserRuleContext[] ANNOTATION_REF_RHS = { ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.ANNOTATION_END }; private static final ParserRuleContext[] INFER_PARAM_END_OR_PARENTHESIS_END = { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.EXPR_FUNC_BODY_START }; private static final ParserRuleContext[] OPTIONAL_PEER_WORKER = { ParserRuleContext.PEER_WORKER_NAME, ParserRuleContext.EXPRESSION_RHS }; private static final ParserRuleContext[] TYPE_DESC_IN_TUPLE_RHS = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.COMMA, ParserRuleContext.ELLIPSIS }; private static final ParserRuleContext[] LIST_CONSTRUCTOR_MEMBER_END = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.COMMA }; private static final ParserRuleContext[] NIL_OR_PARENTHESISED_TYPE_DESC_RHS = { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.TYPE_DESCRIPTOR }; private static final ParserRuleContext[] BINDING_PATTERN = { ParserRuleContext.CAPTURE_BINDING_PATTERN, ParserRuleContext.LIST_BINDING_PATTERN, ParserRuleContext.MAPPING_BINDING_PATTERN}; private static final ParserRuleContext[] LIST_BINDING_PATTERN_CONTENTS = { ParserRuleContext.REST_BINDING_PATTERN, ParserRuleContext.BINDING_PATTERN }; private static final ParserRuleContext[] MAPPING_BINDING_PATTERN_CONTENTS = { ParserRuleContext.REST_BINDING_PATTERN, ParserRuleContext.FIELD_BINDING_PATTERN }; private static final ParserRuleContext[] LIST_BINDING_PATTERN_END_OR_CONTINUE = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET }; private static final ParserRuleContext[] MAPPING_BINDING_PATTERN_END_OR_CONTINUE = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACE}; private static final ParserRuleContext[] FIELD_BINDING_PATTERN_END_OR_CONTINUE = { ParserRuleContext.COMMA, ParserRuleContext.COLON, ParserRuleContext.CLOSE_BRACE}; private static final ParserRuleContext[] REMOTE_CALL_OR_ASYNC_SEND_RHS = { ParserRuleContext.WORKER_NAME_OR_METHOD_NAME, ParserRuleContext.DEFAULT_WORKER_NAME_IN_ASYNC_SEND }; private static 
final ParserRuleContext[] REMOTE_CALL_OR_ASYNC_SEND_END = { ParserRuleContext.ARG_LIST_START, ParserRuleContext.SEMICOLON }; private static final ParserRuleContext[] RECEIVE_WORKERS = { ParserRuleContext.PEER_WORKER_NAME, ParserRuleContext.MULTI_RECEIVE_WORKERS }; private static final ParserRuleContext[] RECEIVE_FIELD = { ParserRuleContext.PEER_WORKER_NAME, ParserRuleContext.RECEIVE_FIELD_NAME }; private static final ParserRuleContext[] RECEIVE_FIELD_END = { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.COMMA }; private static final ParserRuleContext[] WAIT_KEYWORD_RHS = { ParserRuleContext.MULTI_WAIT_FIELDS, ParserRuleContext.ALTERNATE_WAIT_EXPRS }; private static final ParserRuleContext[] WAIT_FIELD_NAME_RHS = { ParserRuleContext.COLON, ParserRuleContext.WAIT_FIELD_END }; private static final ParserRuleContext[] WAIT_FIELD_END = { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.COMMA }; private static final ParserRuleContext[] WAIT_FUTURE_EXPR_END = { ParserRuleContext.ALTERNATE_WAIT_EXPR_LIST_END, ParserRuleContext.PIPE }; private static final ParserRuleContext[] ENUM_MEMBER_START = { ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.ENUM_MEMBER_NAME }; private static final ParserRuleContext[] ENUM_MEMBER_INTERNAL_RHS = { ParserRuleContext.ASSIGN_OP, ParserRuleContext.ENUM_MEMBER_RHS }; private static final ParserRuleContext[] ENUM_MEMBER_RHS = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACE }; private static final ParserRuleContext[] MEMBER_ACCESS_KEY_EXPR_END = { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET }; private static final ParserRuleContext[] ROLLBACK_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.EXPRESSION }; private static final ParserRuleContext[] RETRY_KEYWORD_RHS = { ParserRuleContext.LT, ParserRuleContext.RETRY_TYPE_PARAM_RHS }; private static final ParserRuleContext[] RETRY_TYPE_PARAM_RHS = { ParserRuleContext.ARG_LIST_START, ParserRuleContext.RETRY_BODY }; private static final 
ParserRuleContext[] RETRY_BODY = { ParserRuleContext.BLOCK_STMT, ParserRuleContext.TRANSACTION_STMT }; public BallerinaParserErrorHandler(AbstractTokenReader tokenReader) { super(tokenReader); } @Override protected boolean isProductionWithAlternatives(ParserRuleContext currentCtx) { switch (currentCtx) { case TOP_LEVEL_NODE: case TOP_LEVEL_NODE_WITHOUT_MODIFIER: case TOP_LEVEL_NODE_WITHOUT_METADATA: case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: case FUNC_BODY_OR_TYPE_DESC_RHS: case VAR_DECL_STMT_RHS: case EXPRESSION_RHS: case PARAMETER_NAME_RHS: case ASSIGNMENT_OR_VAR_DECL_STMT: case AFTER_PARAMETER_TYPE: case FIELD_DESCRIPTOR_RHS: case RECORD_BODY_START: case RECORD_BODY_END: case TYPE_DESCRIPTOR: case NAMED_OR_POSITIONAL_ARG_RHS: case OBJECT_FIELD_RHS: case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: case OBJECT_MEMBER: case OBJECT_TYPE_FIRST_QUALIFIER: case OBJECT_TYPE_SECOND_QUALIFIER: case ELSE_BODY: case IMPORT_DECL_RHS: case IMPORT_SUB_VERSION: case VERSION_NUMBER: case IMPORT_VERSION_DECL: case IMPORT_PREFIX_DECL: case MAPPING_FIELD: case FIRST_MAPPING_FIELD: case SPECIFIC_FIELD_RHS: case RESOURCE_DEF: case PARAMETER_WITHOUT_ANNOTS: case PARAMETER_START: case STMT_START_WITH_IDENTIFIER: case STMT_START_WITH_EXPR_RHS: case RECORD_FIELD_OR_RECORD_END: case CONST_DECL_TYPE: case CONST_DECL_RHS: case ANNOT_OPTIONAL_ATTACH_POINTS: case XML_NAMESPACE_PREFIX_DECL: case ANNOT_DECL_OPTIONAL_TYPE: case ANNOT_DECL_RHS: case TABLE_KEYWORD_RHS: case ARRAY_LENGTH: case TYPEDESC_RHS: case ERROR_TYPE_PARAMS: case STREAM_TYPE_FIRST_PARAM_RHS: case KEY_CONSTRAINTS_RHS: case TABLE_TYPE_DESC_RHS: case FUNC_BODY: case FUNC_OPTIONAL_RETURNS: case TERMINAL_EXPRESSION: case TABLE_CONSTRUCTOR_OR_QUERY_START: case TABLE_CONSTRUCTOR_OR_QUERY_RHS: case QUERY_PIPELINE_RHS: case ANON_FUNC_BODY: case BINDING_PATTERN: case LIST_BINDING_PATTERN_CONTENTS: case LIST_BINDING_PATTERN_END_OR_CONTINUE: case MAPPING_BINDING_PATTERN_CONTENTS: case MAPPING_BINDING_PATTERN_END_OR_CONTINUE: case 
FIELD_BINDING_PATTERN_END_OR_CONTINUE: case REMOTE_CALL_OR_ASYNC_SEND_RHS: case REMOTE_CALL_OR_ASYNC_SEND_END: case RECEIVE_FIELD_END: case RECEIVE_WORKERS: case WAIT_FIELD_NAME: case WAIT_FIELD_NAME_RHS: case WAIT_FIELD_END: case WAIT_FUTURE_EXPR_END: case MAPPING_FIELD_END: case ENUM_MEMBER_START: case ENUM_MEMBER_INTERNAL_RHS: case ENUM_MEMBER_RHS: return true; default: return false; } } private boolean isEndOfObjectTypeNode(int nextLookahead) { STToken nextToken = this.tokenReader.peek(nextLookahead); switch (nextToken.kind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: STToken nextNextToken = this.tokenReader.peek(nextLookahead + 1); switch (nextNextToken.kind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case TYPE_KEYWORD: case SERVICE_KEYWORD: return true; default: return false; } } } /** * Search for a solution. * Terminals are directly matched and Non-terminals which have alternative productions are seekInAlternativesPaths() * * @param currentCtx Current context * @param lookahead Position of the next token to consider, relative to the position of the original error. * @param currentDepth Amount of distance traveled so far. 
* @return Recovery result */ @Override protected Result seekMatch(ParserRuleContext currentCtx, int lookahead, int currentDepth, boolean isEntryPoint) { boolean hasMatch; boolean skipRule; int matchingRulesCount = 0;
// Walk the grammar one rule at a time, matching terminals directly against the token stream
// until either the lookahead budget is exhausted or a mismatch forces a fix-and-continue.
while (currentDepth < lookaheadLimit) { hasMatch = true; skipRule = false; STToken nextToken = this.tokenReader.peek(lookahead); switch (currentCtx) { case EOF: hasMatch = nextToken.kind == SyntaxKind.EOF_TOKEN; break; case PUBLIC_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.PUBLIC_KEYWORD; break; case PRIVATE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.PRIVATE_KEYWORD; break; case REMOTE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.REMOTE_KEYWORD; break; case FUNCTION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FUNCTION_KEYWORD; break; case FUNC_NAME: case VARIABLE_NAME: case TYPE_NAME: case FIELD_OR_FUNC_NAME: case IMPORT_ORG_OR_MODULE_NAME: case IMPORT_MODULE_NAME: case IMPORT_PREFIX: case MAPPING_FIELD_NAME: case SERVICE_NAME: case QUALIFIED_IDENTIFIER: case IDENTIFIER: case ANNOTATION_TAG: case NAMESPACE_PREFIX: case WORKER_NAME: case IMPLICIT_ANON_FUNC_PARAM: case WORKER_NAME_OR_METHOD_NAME: case RECEIVE_FIELD_NAME: case WAIT_FIELD_NAME: hasMatch = nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN; break; case OPEN_PARENTHESIS: case PARENTHESISED_TYPE_DESC_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN; break; case CLOSE_PARENTHESIS: hasMatch = nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN; break; case RETURNS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RETURNS_KEYWORD; break; case SIMPLE_TYPE_DESCRIPTOR: hasMatch = BallerinaParser.isSimpleType(nextToken.kind) || nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN; break; case OPEN_BRACE: hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN; break; case CLOSE_BRACE: hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN; break; case ASSIGN_OP: hasMatch = nextToken.kind == SyntaxKind.EQUAL_TOKEN; break; case EXTERNAL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.EXTERNAL_KEYWORD; break; case SEMICOLON: hasMatch = nextToken.kind == SyntaxKind.SEMICOLON_TOKEN; break; case BINARY_OPERATOR: hasMatch = isBinaryOperator(nextToken); break; case COMMA: hasMatch = nextToken.kind == SyntaxKind.COMMA_TOKEN; break; case CLOSED_RECORD_BODY_END: hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN; break; case CLOSED_RECORD_BODY_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN; break; case ELLIPSIS: hasMatch = nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN; break; case QUESTION_MARK: hasMatch = nextToken.kind == SyntaxKind.QUESTION_MARK_TOKEN; break; case RECORD_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RECORD_KEYWORD; break; case TYPE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TYPE_KEYWORD; break; case ARG_LIST_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN; break; case ARG_LIST_END: hasMatch = nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN; break; case OBJECT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.OBJECT_KEYWORD; break; case OBJECT_TYPE_FIRST_QUALIFIER: case OBJECT_TYPE_SECOND_QUALIFIER:
// Never report a missing object-type qualifier as the immediate fix at depth 0.
if (currentDepth == 0) { hasMatch = false; break; } hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD || nextToken.kind == SyntaxKind.CLIENT_KEYWORD; break; case ABSTRACT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD; break; case CLIENT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CLIENT_KEYWORD; break; case OPEN_BRACKET: case TUPLE_TYPE_DESC_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN; break; case CLOSE_BRACKET: hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN; break; case DOT: hasMatch = nextToken.kind == SyntaxKind.DOT_TOKEN; break; case IF_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IF_KEYWORD; break; case ELSE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ELSE_KEYWORD; break; case WHILE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WHILE_KEYWORD; break; case PANIC_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.PANIC_KEYWORD; break; case AS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.AS_KEYWORD; break; case LOCK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.LOCK_KEYWORD; break; case BOOLEAN_LITERAL: hasMatch = nextToken.kind == SyntaxKind.TRUE_KEYWORD || nextToken.kind == SyntaxKind.FALSE_KEYWORD; break; case DECIMAL_INTEGER_LITERAL: case MAJOR_VERSION: case MINOR_VERSION: case PATCH_VERSION: hasMatch = nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL; break; case IMPORT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IMPORT_KEYWORD; break; case SLASH: hasMatch = nextToken.kind == SyntaxKind.SLASH_TOKEN; break; case VERSION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.VERSION_KEYWORD; break; case CONTINUE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CONTINUE_KEYWORD; break; case BREAK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.BREAK_KEYWORD; break; case RETURN_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RETURN_KEYWORD; break; case BASIC_LITERAL: hasMatch = isBasicLiteral(nextToken.kind); break; case COLON: hasMatch = nextToken.kind == SyntaxKind.COLON_TOKEN; break; case STRING_LITERAL: hasMatch = nextToken.kind == SyntaxKind.STRING_LITERAL; break; case SERVICE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.SERVICE_KEYWORD; break; case ON_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ON_KEYWORD; break; case RESOURCE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RESOURCE_KEYWORD; break; case LISTENER_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.LISTENER_KEYWORD; break; case CONST_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CONST_KEYWORD; break; case FINAL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FINAL_KEYWORD; break; case TYPEOF_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TYPEOF_KEYWORD; break; case UNARY_OPERATOR: hasMatch = isUnaryOperator(nextToken); break; case HEX_INTEGER_LITERAL: hasMatch = nextToken.kind == SyntaxKind.HEX_INTEGER_LITERAL; break; case AT: hasMatch = nextToken.kind == SyntaxKind.AT_TOKEN; break; case IS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IS_KEYWORD; break; case RIGHT_ARROW: hasMatch = nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN; break; case PARAMETERIZED_TYPE: hasMatch = isParameterizedTypeToken(nextToken.kind); break; case LT: hasMatch = nextToken.kind == SyntaxKind.LT_TOKEN; break; case GT: hasMatch = nextToken.kind == SyntaxKind.GT_TOKEN; break; case NULL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.NULL_KEYWORD; break; case ANNOTATION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ANNOTATION_KEYWORD; break; case FIELD_IDENT: hasMatch = nextToken.kind == SyntaxKind.FIELD_KEYWORD; break; case FUNCTION_IDENT: hasMatch = nextToken.kind == SyntaxKind.FUNCTION_KEYWORD; break; case IDENT_AFTER_OBJECT_IDENT: hasMatch = nextToken.kind == SyntaxKind.TYPE_KEYWORD || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD || nextToken.kind == SyntaxKind.FIELD_KEYWORD; break; case SOURCE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.SOURCE_KEYWORD; break; case SINGLE_KEYWORD_ATTACH_POINT_IDENT: hasMatch = isSingleKeywordAttachPointIdent(nextToken.kind); break; case OBJECT_IDENT: hasMatch = nextToken.kind == SyntaxKind.OBJECT_KEYWORD; break; case RECORD_IDENT: hasMatch = nextToken.kind == SyntaxKind.RECORD_KEYWORD; break; case RESOURCE_IDENT: hasMatch = nextToken.kind == SyntaxKind.RESOURCE_KEYWORD; break; case XMLNS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.XMLNS_KEYWORD; break; case WORKER_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WORKER_KEYWORD; break; case FORK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FORK_KEYWORD; break; case DECIMAL_FLOATING_POINT_LITERAL: hasMatch = nextToken.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL; break; case HEX_FLOATING_POINT_LITERAL: hasMatch = nextToken.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL; break; case TRAP_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TRAP_KEYWORD; break; case FOREACH_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FOREACH_KEYWORD; break; case IN_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IN_KEYWORD; break; case PIPE: hasMatch = nextToken.kind == SyntaxKind.PIPE_TOKEN; break; case TABLE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TABLE_KEYWORD; break; case KEY_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.KEY_KEYWORD; break; case ERROR_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ERROR_KEYWORD; break; case LET_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.LET_KEYWORD; break; case STREAM_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.STREAM_KEYWORD; break; case TEMPLATE_START: case TEMPLATE_END: hasMatch = nextToken.kind == SyntaxKind.BACKTICK_TOKEN; break; case XML_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.XML_KEYWORD; break; case STRING_KEYWORD:
// FIX: was comparing against SyntaxKind.XML_KEYWORD (copy-paste from the case above),
// which made the `string` keyword in string-template expressions unmatchable.
hasMatch = nextToken.kind == SyntaxKind.STRING_KEYWORD; break; case ASTERISK: case INFERRED_TYPE_DESC: hasMatch = nextToken.kind == SyntaxKind.ASTERISK_TOKEN; break; case NEW_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.NEW_KEYWORD; break; case SELECT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.SELECT_KEYWORD; break; case WHERE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WHERE_KEYWORD; break; case FROM_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FROM_KEYWORD; break; case BITWISE_AND_OPERATOR: hasMatch = nextToken.kind == SyntaxKind.BITWISE_AND_TOKEN; break; case EXPR_FUNC_BODY_START: hasMatch = nextToken.kind == SyntaxKind.RIGHT_DOUBLE_ARROW; break; case START_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.START_KEYWORD; break; case FLUSH_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FLUSH_KEYWORD; break; case DEFAULT_KEYWORD: case DEFAULT_WORKER_NAME_IN_ASYNC_SEND: hasMatch = nextToken.kind == SyntaxKind.DEFAULT_KEYWORD; break; case PLUS_TOKEN: hasMatch = nextToken.kind == SyntaxKind.PLUS_TOKEN; break; case MINUS_TOKEN: hasMatch = nextToken.kind == SyntaxKind.MINUS_TOKEN; break; case SIGNED_INT_OR_FLOAT_RHS: hasMatch = BallerinaParser.isIntOrFloat(nextToken); break; case SYNC_SEND_TOKEN: hasMatch = nextToken.kind == SyntaxKind.SYNC_SEND_TOKEN; break; case PEER_WORKER_NAME: hasMatch = nextToken.kind == SyntaxKind.DEFAULT_KEYWORD || nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN; break; case LEFT_ARROW_TOKEN: hasMatch = nextToken.kind == SyntaxKind.LEFT_ARROW_TOKEN; break; case WAIT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WAIT_KEYWORD; break; case CHECKING_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CHECK_KEYWORD || nextToken.kind == SyntaxKind.CHECKPANIC_KEYWORD; break; case DO_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.DO_KEYWORD; break; case ANNOT_CHAINING_TOKEN: hasMatch = nextToken.kind == SyntaxKind.ANNOT_CHAINING_TOKEN; break; case OPTIONAL_CHAINING_TOKEN: hasMatch = nextToken.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN; break; case TRANSACTION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TRANSACTION_KEYWORD; break; case COMMIT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.COMMIT_KEYWORD; break; case RETRY_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RETRY_KEYWORD; break; case ROLLBACK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ROLLBACK_KEYWORD; break; case TRANSACTIONAL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TRANSACTIONAL_KEYWORD; break; case ENUM_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ENUM_KEYWORD; break; case MODULE_ENUM_NAME: case ENUM_MEMBER_NAME: hasMatch = nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN; break; case VARIABLE_REF: case TYPE_REFERENCE: case ANNOT_REFERENCE: case ANNOT_TAG_REFERENCE: case TYPE_DESC_IN_ANNOTATION_DECL: case TYPE_DESC_BEFORE_IDENTIFIER: case TYPE_DESC_IN_RECORD_FIELD: case TYPE_DESC_IN_PARAM: case TYPE_DESC_IN_TYPE_BINDING_PATTERN: case TYPE_DESC_IN_TYPE_DEF: case TYPE_DESC_IN_ANGLE_BRACKETS: case TYPE_DESC_IN_RETURN_TYPE_DESC: case TYPE_DESC_IN_EXPRESSION: case TYPE_DESC_IN_STREAM_TYPE_DESC: case TYPE_DESC_IN_PARENTHESIS: case TYPE_DESC_IN_NEW_EXPR: default:
// Non-terminal: either branch into its alternative productions, or (for pure
// context-switch rules like the TYPE_DESC_IN_* cases) fall through without
// consuming a token or spending lookahead budget.
if (hasAlternativePaths(currentCtx)) { return seekMatchInAlternativePaths(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint); } skipRule = true; hasMatch = true; break; } if (!hasMatch) { return fixAndContinue(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint); } currentCtx = getNextRule(currentCtx, lookahead + 1); if (!skipRule) { currentDepth++; matchingRulesCount++; lookahead++; isEntryPoint = false; } }
// Lookahead budget exhausted with everything matching so far: keep the current token.
Result result = new Result(new ArrayDeque<>(), matchingRulesCount); result.solution = new Solution(Action.KEEP, currentCtx, SyntaxKind.NONE, currentCtx.toString()); return result; }
// Returns true if the given context has multiple alternative productions, i.e. it must be
// recovered via seekMatchInAlternativePaths() rather than by direct terminal matching.
// This list must stay in sync with the switch in seekMatchInAlternativePaths().
private boolean hasAlternativePaths(ParserRuleContext currentCtx) { switch (currentCtx) { case TOP_LEVEL_NODE: case TOP_LEVEL_NODE_WITHOUT_MODIFIER: case TOP_LEVEL_NODE_WITHOUT_METADATA: case FUNC_OPTIONAL_RETURNS: case FUNC_BODY_OR_TYPE_DESC_RHS: case ANON_FUNC_BODY: case FUNC_BODY: case OBJECT_FUNC_BODY: case EXPRESSION: case TERMINAL_EXPRESSION: case VAR_DECL_STMT_RHS: case EXPRESSION_RHS: case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: case PARAM_LIST: case REQUIRED_PARAM_NAME_RHS: case STATEMENT_START_IDENTIFIER: case ASSIGNMENT_OR_VAR_DECL_STMT_RHS: case FIELD_DESCRIPTOR_RHS: case FIELD_OR_REST_DESCIPTOR_RHS: case RECORD_BODY_END: case RECORD_BODY_START: case TYPE_DESCRIPTOR: case RECORD_FIELD_OR_RECORD_END: case RECORD_FIELD_START: case RECORD_FIELD_WITHOUT_METADATA: case ARG_START: case ARG_START_OR_ARG_LIST_END: case NAMED_OR_POSITIONAL_ARG_RHS: case ARG_END: case OBJECT_MEMBER_START: case OBJECT_MEMBER_WITHOUT_METADATA: case OBJECT_FIELD_RHS: case OBJECT_METHOD_START: case OBJECT_FUNC_OR_FIELD: case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: case OBJECT_TYPE_DESCRIPTOR_START: case ELSE_BLOCK: case ELSE_BODY: case CALL_STMT_START: case IMPORT_PREFIX_DECL: case IMPORT_VERSION_DECL: case IMPORT_DECL_RHS: case AFTER_IMPORT_MODULE_NAME: case MAJOR_MINOR_VERSION_END: case RETURN_STMT_RHS: case ACCESS_EXPRESSION: case FIRST_MAPPING_FIELD: case MAPPING_FIELD: case SPECIFIC_FIELD_RHS: case
MAPPING_FIELD_END: case OPTIONAL_SERVICE_NAME: case RESOURCE_DEF: case CONST_DECL_TYPE: case CONST_DECL_RHS: case ARRAY_LENGTH: case PARAMETER_START: case PARAMETER_WITHOUT_ANNOTS: case STMT_START_WITH_EXPR_RHS: case STMT_START_WITH_IDENTIFIER: case EXPRESSION_STATEMENT_START: case ANNOT_DECL_OPTIONAL_TYPE: case ANNOT_DECL_RHS: case ANNOT_OPTIONAL_ATTACH_POINTS: case ATTACH_POINT: case ATTACH_POINT_IDENT: case ATTACH_POINT_END: case XML_NAMESPACE_PREFIX_DECL: case CONSTANT_EXPRESSION_START: case TYPEDESC_RHS: case LIST_CONSTRUCTOR_FIRST_MEMBER: case TYPE_CAST_PARAM: case TYPE_CAST_PARAM_RHS: case TABLE_KEYWORD_RHS: case ROW_LIST_RHS: case TABLE_ROW_END: case KEY_SPECIFIER_RHS: case TABLE_KEY_RHS: case ERROR_TYPE_PARAMS: case LET_VAR_DECL_START: case STREAM_TYPE_FIRST_PARAM_RHS: case TEMPLATE_MEMBER: case TEMPLATE_STRING_RHS: case FUNCTION_KEYWORD_RHS: case WORKER_NAME_RHS: case BINDING_PATTERN: case LIST_BINDING_PATTERN_END_OR_CONTINUE: case FIELD_BINDING_PATTERN_END_OR_CONTINUE: case LIST_BINDING_PATTERN_CONTENTS: case MAPPING_BINDING_PATTERN_END_OR_CONTINUE: case MAPPING_BINDING_PATTERN_CONTENTS: case KEY_CONSTRAINTS_RHS: case TABLE_TYPE_DESC_RHS: case NEW_KEYWORD_RHS: case TABLE_CONSTRUCTOR_OR_QUERY_START: case TABLE_CONSTRUCTOR_OR_QUERY_RHS: case QUERY_PIPELINE_RHS: case BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS: case ANON_FUNC_PARAM_RHS: case PARAM_END: case ANNOTATION_REF_RHS: case INFER_PARAM_END_OR_PARENTHESIS_END: case TYPE_DESC_IN_TUPLE_RHS: case LIST_CONSTRUCTOR_MEMBER_END: case NIL_OR_PARENTHESISED_TYPE_DESC_RHS: case REMOTE_CALL_OR_ASYNC_SEND_RHS: case REMOTE_CALL_OR_ASYNC_SEND_END: case RECEIVE_WORKERS: case RECEIVE_FIELD: case RECEIVE_FIELD_END: case WAIT_KEYWORD_RHS: case WAIT_FIELD_NAME_RHS: case WAIT_FIELD_END: case WAIT_FUTURE_EXPR_END: case OPTIONAL_PEER_WORKER: case ENUM_MEMBER_START: case ENUM_MEMBER_INTERNAL_RHS: case ENUM_MEMBER_RHS: case MEMBER_ACCESS_KEY_EXPR_END: case ROLLBACK_RHS: case RETRY_KEYWORD_RHS: case RETRY_TYPE_PARAM_RHS: case
RETRY_BODY: return true; default: return false; } }
// Resolves a non-terminal context to its table of alternative productions and searches each
// alternative for the best-matching recovery path. A few contexts (EXPRESSION_RHS, STATEMENT,
// ACCESS_EXPRESSION) are handled by dedicated seek methods instead of a static table.
private Result seekMatchInAlternativePaths(ParserRuleContext currentCtx, int lookahead, int currentDepth, int matchingRulesCount, boolean isEntryPoint) { ParserRuleContext[] alternativeRules; switch (currentCtx) { case TOP_LEVEL_NODE: alternativeRules = TOP_LEVEL_NODE; break; case TOP_LEVEL_NODE_WITHOUT_MODIFIER: alternativeRules = TOP_LEVEL_NODE_WITHOUT_MODIFIER; break; case TOP_LEVEL_NODE_WITHOUT_METADATA: alternativeRules = TOP_LEVEL_NODE_WITHOUT_METADATA; break; case FUNC_OPTIONAL_RETURNS:
// The allowed return-type alternatives depend on whether we are inside a function
// definition, an anonymous function, or a function-type descriptor.
ParserRuleContext parentCtx = getParentContext(); ParserRuleContext[] alternatives; if (parentCtx == ParserRuleContext.FUNC_DEF) { alternatives = FUNC_DEF_OPTIONAL_RETURNS; } else if (parentCtx == ParserRuleContext.ANON_FUNC_EXPRESSION) { alternatives = ANNON_FUNC_OPTIONAL_RETURNS; } else if (parentCtx == ParserRuleContext.FUNC_TYPE_DESC) { alternatives = FUNC_TYPE_OPTIONAL_RETURNS; } else { alternatives = FUNC_TYPE_OR_DEF_OPTIONAL_RETURNS; } alternativeRules = alternatives; break; case FUNC_BODY_OR_TYPE_DESC_RHS: alternativeRules = FUNC_BODY_OR_TYPE_DESC_RHS; break; case ANON_FUNC_BODY: alternativeRules = ANON_FUNC_BODY; break; case FUNC_BODY: case OBJECT_FUNC_BODY: if (getGrandParentContext() == ParserRuleContext.OBJECT_MEMBER) { alternativeRules = OBJECT_FUNC_BODY; } else { alternativeRules = FUNC_BODY; } break; case EXPRESSION: case TERMINAL_EXPRESSION: alternativeRules = EXPRESSION_START; break; case VAR_DECL_STMT_RHS: alternativeRules = VAR_DECL_RHS; break; case EXPRESSION_RHS: return seekMatchInExpressionRhs(lookahead, currentDepth, matchingRulesCount, isEntryPoint); case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: return seekInStatements(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint); case PARAM_LIST: alternativeRules = PARAM_LIST; break; case REQUIRED_PARAM_NAME_RHS: alternativeRules = REQUIRED_PARAM_NAME_RHS; break; case STATEMENT_START_IDENTIFIER: alternativeRules = TYPE_OR_VAR_NAME; break;
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS: alternativeRules = ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN; break; case FIELD_DESCRIPTOR_RHS: alternativeRules = FIELD_DESCRIPTOR_RHS; break; case FIELD_OR_REST_DESCIPTOR_RHS: alternativeRules = FIELD_OR_REST_DESCIPTOR_RHS; break; case RECORD_BODY_END: alternativeRules = RECORD_BODY_END; break; case RECORD_BODY_START: alternativeRules = RECORD_BODY_START; break; case TYPE_DESCRIPTOR: alternativeRules = TYPE_DESCRIPTORS; break; case RECORD_FIELD_OR_RECORD_END: alternativeRules = RECORD_FIELD_OR_RECORD_END; break; case RECORD_FIELD_START: alternativeRules = RECORD_FIELD_START; break; case RECORD_FIELD_WITHOUT_METADATA: alternativeRules = RECORD_FIELD_WITHOUT_METADATA; break; case ARG_START: alternativeRules = ARG_START; break; case ARG_START_OR_ARG_LIST_END: alternativeRules = ARG_START_OR_ARG_LIST_END; break; case NAMED_OR_POSITIONAL_ARG_RHS: alternativeRules = NAMED_OR_POSITIONAL_ARG_RHS; break; case ARG_END: alternativeRules = ARG_END; break; case OBJECT_MEMBER_START: alternativeRules = OBJECT_MEMBER_START; break; case OBJECT_MEMBER_WITHOUT_METADATA: alternativeRules = OBJECT_MEMBER_WITHOUT_METADATA; break; case OBJECT_FIELD_RHS: alternativeRules = OBJECT_FIELD_RHS; break; case OBJECT_METHOD_START: alternativeRules = OBJECT_METHOD_START; break; case OBJECT_FUNC_OR_FIELD: alternativeRules = OBJECT_FUNC_OR_FIELD; break; case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: alternativeRules = OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY; break; case OBJECT_TYPE_DESCRIPTOR_START: alternativeRules = OBJECT_TYPE_DESCRIPTOR_START; break; case ELSE_BLOCK: alternativeRules = ELSE_BLOCK; break; case ELSE_BODY: alternativeRules = ELSE_BODY; break; case CALL_STMT_START: alternativeRules = CALL_STATEMENT; break; case IMPORT_PREFIX_DECL: alternativeRules = IMPORT_PREFIX_DECL; break; case IMPORT_VERSION_DECL: alternativeRules = IMPORT_VERSION; break; case IMPORT_DECL_RHS: alternativeRules = IMPORT_DECL_RHS; break; case AFTER_IMPORT_MODULE_NAME:
alternativeRules = AFTER_IMPORT_MODULE_NAME; break; case MAJOR_MINOR_VERSION_END: alternativeRules = MAJOR_MINOR_VERSION_END; break; case RETURN_STMT_RHS: alternativeRules = RETURN_RHS; break; case ACCESS_EXPRESSION: return seekInAccessExpression(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint); case FIRST_MAPPING_FIELD: alternativeRules = FIRST_MAPPING_FIELD_START; break; case MAPPING_FIELD: alternativeRules = MAPPING_FIELD_START; break; case SPECIFIC_FIELD_RHS: alternativeRules = SPECIFIC_FIELD_RHS; break; case MAPPING_FIELD_END: alternativeRules = MAPPING_FIELD_END; break; case OPTIONAL_SERVICE_NAME: alternativeRules = OPTIONAL_SERVICE_NAME; break; case RESOURCE_DEF: alternativeRules = RESOURCE_DEF_START; break; case CONST_DECL_TYPE: alternativeRules = CONST_DECL_TYPE; break; case CONST_DECL_RHS: alternativeRules = CONST_DECL_RHS; break; case ARRAY_LENGTH: alternativeRules = ARRAY_LENGTH; break; case PARAMETER_START: alternativeRules = PARAMETER_START; break; case PARAMETER_WITHOUT_ANNOTS: alternativeRules = PARAMETER_WITHOUT_ANNOTS; break; case STMT_START_WITH_EXPR_RHS: alternativeRules = STMT_START_WITH_EXPR_RHS; break; case STMT_START_WITH_IDENTIFIER: alternativeRules = STMT_START_WITH_IDENTIFIER; break; case EXPRESSION_STATEMENT_START: alternativeRules = EXPRESSION_STATEMENT_START; break; case ANNOT_DECL_OPTIONAL_TYPE: alternativeRules = ANNOT_DECL_OPTIONAL_TYPE; break; case ANNOT_DECL_RHS: alternativeRules = ANNOT_DECL_RHS; break; case ANNOT_OPTIONAL_ATTACH_POINTS: alternativeRules = ANNOT_OPTIONAL_ATTACH_POINTS; break; case ATTACH_POINT: alternativeRules = ATTACH_POINT; break; case ATTACH_POINT_IDENT: alternativeRules = ATTACH_POINT_IDENT; break; case ATTACH_POINT_END: alternativeRules = ATTACH_POINT_END; break; case XML_NAMESPACE_PREFIX_DECL: alternativeRules = XML_NAMESPACE_PREFIX_DECL; break; case CONSTANT_EXPRESSION_START: alternativeRules = CONSTANT_EXPRESSION; break; case TYPEDESC_RHS: alternativeRules = TYPEDESC_RHS; break;
case LIST_CONSTRUCTOR_FIRST_MEMBER: alternativeRules = LIST_CONSTRUCTOR_RHS; break; case TYPE_CAST_PARAM: alternativeRules = TYPE_CAST_PARAM; break; case TYPE_CAST_PARAM_RHS: alternativeRules = TYPE_CAST_PARAM_RHS; break; case TABLE_KEYWORD_RHS: alternativeRules = TABLE_KEYWORD_RHS; break; case ROW_LIST_RHS: alternativeRules = ROW_LIST_RHS; break; case TABLE_ROW_END: alternativeRules = TABLE_ROW_END; break; case KEY_SPECIFIER_RHS: alternativeRules = KEY_SPECIFIER_RHS; break; case TABLE_KEY_RHS: alternativeRules = TABLE_KEY_RHS; break; case ERROR_TYPE_PARAMS: alternativeRules = ERROR_TYPE_PARAMS; break; case LET_VAR_DECL_START: alternativeRules = LET_VAR_DECL_START; break; case STREAM_TYPE_FIRST_PARAM_RHS: alternativeRules = STREAM_TYPE_FIRST_PARAM_RHS; break; case TEMPLATE_MEMBER: alternativeRules = TEMPLATE_MEMBER; break; case TEMPLATE_STRING_RHS: alternativeRules = TEMPLATE_STRING_RHS; break; case FUNCTION_KEYWORD_RHS: alternativeRules = FUNCTION_KEYWORD_RHS; break; case WORKER_NAME_RHS: alternativeRules = WORKER_NAME_RHS; break; case BINDING_PATTERN: alternativeRules = BINDING_PATTERN; break; case LIST_BINDING_PATTERN_END_OR_CONTINUE: alternativeRules = LIST_BINDING_PATTERN_END_OR_CONTINUE; break; case LIST_BINDING_PATTERN_CONTENTS: alternativeRules = LIST_BINDING_PATTERN_CONTENTS; break; case MAPPING_BINDING_PATTERN_END_OR_CONTINUE: alternativeRules = MAPPING_BINDING_PATTERN_END_OR_CONTINUE; break; case FIELD_BINDING_PATTERN_END_OR_CONTINUE: alternativeRules = FIELD_BINDING_PATTERN_END_OR_CONTINUE; break; case MAPPING_BINDING_PATTERN_CONTENTS: alternativeRules = MAPPING_BINDING_PATTERN_CONTENTS; break; case KEY_CONSTRAINTS_RHS: alternativeRules = KEY_CONSTRAINTS_RHS; break; case TABLE_TYPE_DESC_RHS: alternativeRules = TABLE_TYPE_DESC_RHS; break; case NEW_KEYWORD_RHS: alternativeRules = NEW_KEYWORD_RHS; break; case TABLE_CONSTRUCTOR_OR_QUERY_START: alternativeRules = TABLE_CONSTRUCTOR_OR_QUERY_START; break; case TABLE_CONSTRUCTOR_OR_QUERY_RHS: alternativeRules =
TABLE_CONSTRUCTOR_OR_QUERY_RHS; break; case QUERY_PIPELINE_RHS: alternativeRules = QUERY_EXPRESSION_RHS; break; case BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS: case ANON_FUNC_PARAM_RHS: alternativeRules = BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS; break; case PARAM_END: alternativeRules = PARAM_END; break; case ANNOTATION_REF_RHS: alternativeRules = ANNOTATION_REF_RHS; break; case INFER_PARAM_END_OR_PARENTHESIS_END: alternativeRules = INFER_PARAM_END_OR_PARENTHESIS_END; break; case TYPE_DESC_IN_TUPLE_RHS: alternativeRules = TYPE_DESC_IN_TUPLE_RHS; break; case LIST_CONSTRUCTOR_MEMBER_END: alternativeRules = LIST_CONSTRUCTOR_MEMBER_END; break; case NIL_OR_PARENTHESISED_TYPE_DESC_RHS: alternativeRules = NIL_OR_PARENTHESISED_TYPE_DESC_RHS; break; case REMOTE_CALL_OR_ASYNC_SEND_RHS: alternativeRules = REMOTE_CALL_OR_ASYNC_SEND_RHS; break; case REMOTE_CALL_OR_ASYNC_SEND_END: alternativeRules = REMOTE_CALL_OR_ASYNC_SEND_END; break; case RECEIVE_WORKERS: alternativeRules = RECEIVE_WORKERS; break; case RECEIVE_FIELD: alternativeRules = RECEIVE_FIELD; break; case RECEIVE_FIELD_END: alternativeRules = RECEIVE_FIELD_END; break; case WAIT_KEYWORD_RHS: alternativeRules = WAIT_KEYWORD_RHS; break; case WAIT_FIELD_NAME_RHS: alternativeRules = WAIT_FIELD_NAME_RHS; break; case WAIT_FIELD_END: alternativeRules = WAIT_FIELD_END; break; case WAIT_FUTURE_EXPR_END: alternativeRules = WAIT_FUTURE_EXPR_END; break; case OPTIONAL_PEER_WORKER: alternativeRules = OPTIONAL_PEER_WORKER; break; case ENUM_MEMBER_START: alternativeRules = ENUM_MEMBER_START; break; case ENUM_MEMBER_INTERNAL_RHS: alternativeRules = ENUM_MEMBER_INTERNAL_RHS; break; case ENUM_MEMBER_RHS: alternativeRules = ENUM_MEMBER_RHS; break; case MEMBER_ACCESS_KEY_EXPR_END: alternativeRules = MEMBER_ACCESS_KEY_EXPR_END; break; case ROLLBACK_RHS: alternativeRules = ROLLBACK_RHS; break; case RETRY_KEYWORD_RHS: alternativeRules = RETRY_KEYWORD_RHS; break; case RETRY_TYPE_PARAM_RHS: alternativeRules = RETRY_TYPE_PARAM_RHS; break; case RETRY_BODY:
alternativeRules = RETRY_BODY; break; default: throw new IllegalStateException(); } return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, alternativeRules, isEntryPoint); } /** * Search for matching token sequences within different kinds of statements and returns the most optimal solution. * * @param currentCtx Current context * @param lookahead Position of the next token to consider, relative to the position of the original error * @param currentDepth Amount of distance traveled so far * @param currentMatches Matching tokens found so far * @param fixes Fixes made so far * @return Recovery result */ private Result seekInStatements(ParserRuleContext currentCtx, int lookahead, int currentDepth, int currentMatches, boolean isEntryPoint) { STToken nextToken = this.tokenReader.peek(lookahead);; if (nextToken.kind == SyntaxKind.SEMICOLON_TOKEN) { Result result = seekMatchInSubTree(ParserRuleContext.STATEMENT, lookahead + 1, currentDepth, isEntryPoint); result.fixes.push(new Solution(Action.REMOVE, currentCtx, nextToken.kind, nextToken.toString())); return getFinalResult(currentMatches, result); } return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, STATEMENTS, isEntryPoint); } /** * Search for matching token sequences within access expressions and returns the most optimal solution. * Access expression can be one of: method-call, field-access, member-access. 
*
 * @param currentCtx Current context
 * @param lookahead Position of the next token to consider, relative to the position of the original error
 * @param currentDepth Amount of distance traveled so far
 * @param currentMatches Matching tokens found so far
 * @param isEntryPoint Whether this is the entry point to the error recovery
 * @return Recovery result
 */
private Result seekInAccessExpression(ParserRuleContext currentCtx, int lookahead, int currentDepth, int currentMatches, boolean isEntryPoint) { STToken nextToken = this.tokenReader.peek(lookahead); currentDepth++;
// An access expression must begin with an identifier; anything else is an error to fix.
if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth); return getFinalResult(currentMatches, fixedPathResult); }
// Decide the follow-up context from the token after the identifier:
// `(` -> method call, `.` -> field access, `[` -> member access; otherwise fall back
// to the generic expression-continuation rule.
ParserRuleContext nextContext; STToken nextNextToken = this.tokenReader.peek(lookahead + 1); switch (nextNextToken.kind) { case OPEN_PAREN_TOKEN: nextContext = ParserRuleContext.OPEN_PARENTHESIS; break; case DOT_TOKEN: nextContext = ParserRuleContext.DOT; break; case OPEN_BRACKET_TOKEN: nextContext = ParserRuleContext.MEMBER_ACCESS_KEY_EXPR; break; default: nextContext = getNextRuleForExpr(); break; } currentMatches++; lookahead++; Result result = seekMatch(nextContext, lookahead, currentDepth, isEntryPoint); return getFinalResult(currentMatches, result); } /** * Search for a match in rhs of an expression. RHS of an expression can be the end * of the expression or the rhs of a binary expression.
* * @param lookahead Position of the next token to consider, relative to the position of the original error * @param currentDepth Amount of distance traveled so far * @param currentMatches Matching tokens found so far * @param isEntryPoint * @return Recovery result */ private Result seekMatchInExpressionRhs(int lookahead, int currentDepth, int currentMatches, boolean isEntryPoint) { ParserRuleContext parentCtx = getParentContext(); ParserRuleContext[] next; switch (parentCtx) { case ARG_LIST: next = new ParserRuleContext[] { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.COMMA, ParserRuleContext.ARG_LIST_START, ParserRuleContext.ARG_LIST_END }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case MAPPING_CONSTRUCTOR: case MULTI_WAIT_FIELDS: next = new ParserRuleContext[] { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.COMMA, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case COMPUTED_FIELD_NAME: next = new ParserRuleContext[] { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.OPEN_BRACKET, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case LISTENERS_LIST: next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR, 
ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.OPEN_BRACE, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case LIST_CONSTRUCTOR: case MEMBER_ACCESS_KEY_EXPR: next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case LET_EXPR_LET_VAR_DECL: next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.IN_KEYWORD, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case LET_CLAUSE_LET_VAR_DECL: next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.LET_CLAUSE_END, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); case QUERY_EXPRESSION: next = new ParserRuleContext[] { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, 
ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.QUERY_PIPELINE_RHS, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); default: if (isParameter(parentCtx)) { next = new ParserRuleContext[] { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.COMMA, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint); } break; } ParserRuleContext nextContext; if (parentCtx == ParserRuleContext.IF_BLOCK || parentCtx == ParserRuleContext.WHILE_BLOCK || parentCtx == ParserRuleContext.FOREACH_STMT) { nextContext = ParserRuleContext.BLOCK_STMT; } else if (isStatement(parentCtx) || parentCtx == ParserRuleContext.RECORD_FIELD || parentCtx == ParserRuleContext.OBJECT_MEMBER || parentCtx == ParserRuleContext.LISTENER_DECL || parentCtx == ParserRuleContext.CONSTANT_DECL) { nextContext = ParserRuleContext.SEMICOLON; } else if (parentCtx == ParserRuleContext.ANNOTATIONS) { nextContext = ParserRuleContext.ANNOTATION_END; } else if (parentCtx == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) { nextContext = ParserRuleContext.CLOSE_BRACKET; } else if (parentCtx == ParserRuleContext.INTERPOLATION) { nextContext = ParserRuleContext.CLOSE_BRACE; } else if (parentCtx == ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS) { nextContext = ParserRuleContext.CLOSE_PARENTHESIS; } else if (parentCtx == ParserRuleContext.FUNC_DEF) { nextContext = ParserRuleContext.SEMICOLON; } else if (parentCtx == ParserRuleContext.ALTERNATE_WAIT_EXPRS) { nextContext = ParserRuleContext.ALTERNATE_WAIT_EXPR_LIST_END; } else if (parentCtx == ParserRuleContext.CONDITIONAL_EXPRESSION) { nextContext = 
                ParserRuleContext.COLON;
        } else if (parentCtx == ParserRuleContext.ENUM_MEMBER_LIST) {
            nextContext = ParserRuleContext.ENUM_MEMBER_RHS;
        } else {
            throw new IllegalStateException(parentCtx.toString());
        }

        // Tokens that may legally continue a terminal expression, together with the
        // context-specific terminator ('nextContext') computed above.
        ParserRuleContext[] alternatives = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.IS_KEYWORD,
                ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.RIGHT_ARROW,
                ParserRuleContext.SYNC_SEND_TOKEN, nextContext, ParserRuleContext.ARG_LIST_START };
        return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, alternatives, isEntryPoint);
    }

    /**
     * Get the next parser rule/context given the current parser context.
     * <p>
     * Simple one-to-one transitions are handled inline in this table; anything requiring
     * more logic is delegated to a {@code getNextRuleFor*} helper, and rules not covered
     * here fall through to {@link #getNextRuleInternal}.
     *
     * @param currentCtx Current parser context
     * @param nextLookahead Position of the next token to consider, relative to the position of the original error
     * @return Next parser context
     */
    @Override
    protected ParserRuleContext getNextRule(ParserRuleContext currentCtx, int nextLookahead) {
        // Entering certain rules must push (or switch) a scope on the context stack first.
        startContextIfRequired(currentCtx);
        ParserRuleContext parentCtx;
        STToken nextToken;
        switch (currentCtx) {
            case EOF: return ParserRuleContext.EOF;
            case COMP_UNIT: return ParserRuleContext.TOP_LEVEL_NODE;
            case PUBLIC_KEYWORD:
                // 'public' is valid on object members, parameters, and top-level nodes;
                // disambiguate by the enclosing context.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.OBJECT_TYPE_DESCRIPTOR ||
                        parentCtx == ParserRuleContext.OBJECT_MEMBER) {
                    return ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY;
                } else if (isParameter(parentCtx)) {
                    return ParserRuleContext.TYPE_DESC_IN_PARAM;
                }
                return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER;
            case PRIVATE_KEYWORD: return ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY;
            case FUNC_DEF:
            case FUNC_DEF_OR_FUNC_TYPE:
            case FUNC_TYPE_DESC:
            case ANON_FUNC_EXPRESSION:
                return ParserRuleContext.FUNCTION_KEYWORD;
            case EXTERNAL_FUNC_BODY: return ParserRuleContext.ASSIGN_OP;
            case FUNC_BODY_BLOCK: return ParserRuleContext.OPEN_BRACE;
            case STATEMENT:
            case STATEMENT_WITHOUT_ANNOTS:
                // A statement is complete: pop the statement scope and expect the block to close.
                endContext();
                return ParserRuleContext.CLOSE_BRACE;
            case ASSIGN_OP: return getNextRuleForEqualOp();
            case COMPOUND_BINARY_OPERATOR: return ParserRuleContext.ASSIGN_OP;
            case CLOSE_BRACE: return getNextRuleForCloseBrace(nextLookahead);
            case CLOSE_PARENTHESIS: return getNextRuleForCloseParenthsis();
            case EXPRESSION:
            case BASIC_LITERAL:
            case TERMINAL_EXPRESSION:
                return getNextRuleForExpr();
            case EXTERNAL_KEYWORD: return ParserRuleContext.SEMICOLON;
            case FUNCTION_KEYWORD: return ParserRuleContext.FUNCTION_KEYWORD_RHS;
            case FUNC_NAME: return ParserRuleContext.OPEN_PARENTHESIS;
            case OPEN_BRACE: return getNextRuleForOpenBrace(nextLookahead);
            case OPEN_PARENTHESIS: return getNextRuleForOpenParenthesis();
            case RETURNS_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC;
            case SEMICOLON: return getNextRuleForSemicolon(nextLookahead);
            case SIMPLE_TYPE_DESCRIPTOR: return ParserRuleContext.TYPEDESC_RHS;
            case VARIABLE_NAME:
            case PARAMETER_NAME_RHS:
                return getNextRuleForVarName();
            case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE;
            case REQUIRED_PARAM:
            case DEFAULTABLE_PARAM:
            case REST_PARAM:
                return ParserRuleContext.TYPE_DESC_IN_PARAM;
            case ASSIGNMENT_STMT: return ParserRuleContext.VARIABLE_NAME;
            case COMPOUND_ASSIGNMENT_STMT: return ParserRuleContext.VARIABLE_NAME;
            case VAR_DECL_STMT: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN;
            case EXPRESSION_RHS: return ParserRuleContext.BINARY_OPERATOR;
            case BINARY_OPERATOR: return ParserRuleContext.EXPRESSION;
            case COMMA: return getNextRuleForComma();
            case AFTER_PARAMETER_TYPE: return getNextRuleForParamType();
            case MODULE_TYPE_DEFINITION: return ParserRuleContext.TYPE_KEYWORD;
            case CLOSED_RECORD_BODY_END:
                endContext();
                nextToken = this.tokenReader.peek(nextLookahead);
                if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                    return ParserRuleContext.EOF;
                }
                return ParserRuleContext.TYPEDESC_RHS;
            case CLOSED_RECORD_BODY_START: return ParserRuleContext.RECORD_FIELD_OR_RECORD_END;
            case ELLIPSIS:
                // '...' means a spread in constructors/args, a rest type in tuples,
                // and a rest parameter/binding elsewhere.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR ||
                        parentCtx == ParserRuleContext.ARG_LIST) {
                    return ParserRuleContext.EXPRESSION;
                }
                if (parentCtx == ParserRuleContext.TYPE_DESC_IN_TUPLE) {
                    return ParserRuleContext.CLOSE_PARENTHESIS;
                }
                return ParserRuleContext.VARIABLE_NAME;
            case QUESTION_MARK: return getNextRuleForQuestionMark();
            case RECORD_KEYWORD: return ParserRuleContext.RECORD_BODY_START;
            case TYPE_KEYWORD: return ParserRuleContext.TYPE_NAME;
            case RECORD_TYPE_DESCRIPTOR: return ParserRuleContext.RECORD_KEYWORD;
            case ASTERISK:
                // '*' is an array-length wildcard inside an array type; otherwise a type inclusion.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) {
                    return ParserRuleContext.CLOSE_BRACKET;
                }
                return ParserRuleContext.TYPE_REFERENCE;
            case TYPE_NAME: return ParserRuleContext.TYPE_DESC_IN_TYPE_DEF;
            case OBJECT_KEYWORD: return ParserRuleContext.OPEN_BRACE;
            case REMOTE_KEYWORD: return ParserRuleContext.FUNCTION_KEYWORD;
            case OBJECT_TYPE_DESCRIPTOR: return ParserRuleContext.OBJECT_TYPE_DESCRIPTOR_START;
            case OBJECT_TYPE_FIRST_QUALIFIER:
            case OBJECT_TYPE_SECOND_QUALIFIER:
                return ParserRuleContext.OBJECT_KEYWORD;
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
                return ParserRuleContext.OBJECT_KEYWORD;
            case OPEN_BRACKET: return getNextRuleForOpenBracket();
            case CLOSE_BRACKET: return getNextRuleForCloseBracket();
            case FIELD_OR_FUNC_NAME: return ParserRuleContext.EXPRESSION_RHS;
            case DOT: return getNextRuleForDot();
            case IF_KEYWORD: return ParserRuleContext.EXPRESSION;
            case ELSE_KEYWORD: return ParserRuleContext.ELSE_BODY;
            case BLOCK_STMT: return ParserRuleContext.OPEN_BRACE;
            case IF_BLOCK: return ParserRuleContext.IF_KEYWORD;
            case WHILE_BLOCK: return ParserRuleContext.WHILE_KEYWORD;
            case WHILE_KEYWORD: return ParserRuleContext.EXPRESSION;
            case CHECKING_KEYWORD: return ParserRuleContext.EXPRESSION;
            case CALL_STMT: return ParserRuleContext.CALL_STMT_START;
            case PANIC_STMT: return ParserRuleContext.PANIC_KEYWORD;
            case PANIC_KEYWORD: return ParserRuleContext.EXPRESSION;
            // NOTE(review): FUNC_CALL -> IMPORT_PREFIX looks surprising; presumably a shared
            // identifier-start path — confirm against the grammar before changing.
            case FUNC_CALL: return ParserRuleContext.IMPORT_PREFIX;
            case IMPORT_KEYWORD: return ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME;
            case IMPORT_PREFIX:
            case NAMESPACE_PREFIX:
                return ParserRuleContext.SEMICOLON;
            case VERSION_NUMBER:
            case VERSION_KEYWORD:
                return ParserRuleContext.MAJOR_VERSION;
            case SLASH: return ParserRuleContext.IMPORT_MODULE_NAME;
            case IMPORT_ORG_OR_MODULE_NAME: return ParserRuleContext.IMPORT_DECL_RHS;
            case IMPORT_MODULE_NAME: return ParserRuleContext.AFTER_IMPORT_MODULE_NAME;
            case AS_KEYWORD:
                // 'as' appears only in import-prefix and xmlns declarations.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.IMPORT_DECL) {
                    return ParserRuleContext.IMPORT_PREFIX;
                } else if (parentCtx == ParserRuleContext.XML_NAMESPACE_DECLARATION) {
                    return ParserRuleContext.NAMESPACE_PREFIX;
                }
                throw new IllegalStateException();
            case MAJOR_VERSION:
            case MINOR_VERSION:
            case IMPORT_SUB_VERSION:
                return ParserRuleContext.MAJOR_MINOR_VERSION_END;
            case PATCH_VERSION: return ParserRuleContext.IMPORT_PREFIX_DECL;
            case IMPORT_DECL: return ParserRuleContext.IMPORT_KEYWORD;
            case CONTINUE_STATEMENT: return ParserRuleContext.CONTINUE_KEYWORD;
            case BREAK_STATEMENT: return ParserRuleContext.BREAK_KEYWORD;
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
                return ParserRuleContext.SEMICOLON;
            case RETURN_STMT: return ParserRuleContext.RETURN_KEYWORD;
            case RETURN_KEYWORD: return ParserRuleContext.RETURN_STMT_RHS;
            case ACCESS_EXPRESSION: return ParserRuleContext.VARIABLE_REF;
            case MAPPING_FIELD_NAME: return ParserRuleContext.SPECIFIC_FIELD_RHS;
            case COLON:
                // ':' is heavily overloaded; disambiguate by the enclosing context.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
                    return ParserRuleContext.EXPRESSION;
                } else if (parentCtx == ParserRuleContext.MULTI_RECEIVE_WORKERS) {
                    return ParserRuleContext.PEER_WORKER_NAME;
                } else if (parentCtx == ParserRuleContext.MULTI_WAIT_FIELDS) {
                    return ParserRuleContext.EXPRESSION;
                } else if (parentCtx == ParserRuleContext.CONDITIONAL_EXPRESSION) {
                    endContext();
                    return ParserRuleContext.EXPRESSION;
                } else if (parentCtx == ParserRuleContext.MAPPING_BINDING_PATTERN) {
                    return ParserRuleContext.VARIABLE_NAME;
                } else if (parentCtx == ParserRuleContext.FIELD_BINDING_PATTERN) {
                    endContext();
                    return ParserRuleContext.VARIABLE_NAME;
                }
                // Otherwise treat it as a qualified-identifier separator.
                return ParserRuleContext.IDENTIFIER;
            case STRING_LITERAL: return ParserRuleContext.COLON;
            case COMPUTED_FIELD_NAME: return ParserRuleContext.OPEN_BRACKET;
            case LISTENERS_LIST: return ParserRuleContext.EXPRESSION;
            case ON_KEYWORD:
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.ANNOTATION_DECL) {
                    return ParserRuleContext.ANNOT_ATTACH_POINTS_LIST;
                }
                return ParserRuleContext.LISTENERS_LIST;
            case RESOURCE_KEYWORD: return ParserRuleContext.FUNC_DEF;
            case SERVICE_DECL: return ParserRuleContext.SERVICE_KEYWORD;
            case SERVICE_KEYWORD: return ParserRuleContext.OPTIONAL_SERVICE_NAME;
            case SERVICE_NAME: return ParserRuleContext.ON_KEYWORD;
            case LISTENER_KEYWORD: return ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER;
            case LISTENER_DECL: return ParserRuleContext.LISTENER_KEYWORD;
            case FINAL_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN;
            case CONSTANT_DECL: return ParserRuleContext.CONST_KEYWORD;
            case CONST_KEYWORD: return ParserRuleContext.CONST_DECL_TYPE;
            case CONST_DECL_TYPE: return ParserRuleContext.CONST_DECL_RHS;
            case NIL_TYPE_DESCRIPTOR: return ParserRuleContext.OPEN_PARENTHESIS;
            case TYPEOF_EXPRESSION: return ParserRuleContext.TYPEOF_KEYWORD;
            case TYPEOF_KEYWORD: return ParserRuleContext.EXPRESSION;
            case OPTIONAL_TYPE_DESCRIPTOR: return ParserRuleContext.QUESTION_MARK;
            case UNARY_EXPRESSION: return ParserRuleContext.UNARY_OPERATOR;
            case UNARY_OPERATOR: return ParserRuleContext.EXPRESSION;
            case ARRAY_TYPE_DESCRIPTOR: return ParserRuleContext.OPEN_BRACKET;
            case ARRAY_LENGTH: return ParserRuleContext.CLOSE_BRACKET;
            case AT: return ParserRuleContext.ANNOT_REFERENCE;
            case DOC_STRING: return ParserRuleContext.ANNOTATIONS;
            case ANNOTATIONS: return ParserRuleContext.AT;
            case MAPPING_CONSTRUCTOR: return ParserRuleContext.OPEN_BRACE;
            case VARIABLE_REF:
            case TYPE_REFERENCE:
            case ANNOT_REFERENCE:
            case ANNOT_TAG_REFERENCE:
                return ParserRuleContext.QUALIFIED_IDENTIFIER;
            case QUALIFIED_IDENTIFIER:
                nextToken = this.tokenReader.peek(nextLookahead);
                if (nextToken.kind == SyntaxKind.COLON_TOKEN) {
                    return ParserRuleContext.COLON;
                }
                // Else, fall through: treat it as a plain identifier.
            case IDENTIFIER:
                parentCtx = getParentContext();
                switch (parentCtx) {
                    case VARIABLE_REF:
                        endContext();
                        return getNextRuleForExpr();
                    case TYPE_REFERENCE:
                        endContext();
                        if (isInTypeDescContext()) {
                            return ParserRuleContext.TYPEDESC_RHS;
                        }
                        return ParserRuleContext.SEMICOLON;
                    case ANNOT_REFERENCE:
                        endContext();
                        return ParserRuleContext.ANNOTATION_REF_RHS;
                    case ANNOTATION_DECL:
                        return ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS;
                    case ANNOT_TAG_REFERENCE:
                        endContext();
                        return ParserRuleContext.EXPRESSION_RHS;
                    default:
                        throw new IllegalStateException(parentCtx.toString());
                }
            case IS_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_EXPRESSION;
            case NULL_KEYWORD: return ParserRuleContext.EXPRESSION_RHS;
            case NIL_LITERAL: return ParserRuleContext.OPEN_PARENTHESIS;
            case LOCAL_TYPE_DEFINITION_STMT: return ParserRuleContext.TYPE_KEYWORD;
            case RIGHT_ARROW: return ParserRuleContext.EXPRESSION;
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case STATEMENT_START_IDENTIFIER:
                return getNextRuleForDecimalIntegerLiteral();
            case EXPRESSION_STATEMENT: return ParserRuleContext.EXPRESSION_STATEMENT_START;
            // NOTE(review): MAP_KEYWORD/FUTURE_KEYWORD transitioning to LOCK_KEYWORD looks
            // suspicious (expected a parameterized-type continuation) — verify against grammar.
            case MAP_KEYWORD:
            case FUTURE_KEYWORD:
            case LOCK_STMT:
                return ParserRuleContext.LOCK_KEYWORD;
            case LOCK_KEYWORD: return ParserRuleContext.BLOCK_STMT;
            case RECORD_FIELD: return ParserRuleContext.RECORD_FIELD_START;
            case ANNOTATION_TAG: return ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS;
            case ANNOTATION_KEYWORD: return ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE;
            case ANNOT_ATTACH_POINTS_LIST: return ParserRuleContext.ATTACH_POINT;
            case FIELD_IDENT:
            case FUNCTION_IDENT:
            case IDENT_AFTER_OBJECT_IDENT:
            case SINGLE_KEYWORD_ATTACH_POINT_IDENT:
            case ATTACH_POINT:
                return ParserRuleContext.ATTACH_POINT_END;
            case RECORD_FIELD_OR_RECORD_END: return ParserRuleContext.RECORD_BODY_END;
            case SOURCE_KEYWORD: return ParserRuleContext.ATTACH_POINT_IDENT;
            case OBJECT_IDENT: return ParserRuleContext.IDENT_AFTER_OBJECT_IDENT;
            case RECORD_IDENT: return ParserRuleContext.FIELD_IDENT;
            case RESOURCE_IDENT: return ParserRuleContext.FUNCTION_IDENT;
            case ANNOTATION_DECL: return ParserRuleContext.ANNOTATION_KEYWORD;
            case XML_NAMESPACE_DECLARATION: return ParserRuleContext.XMLNS_KEYWORD;
            case XMLNS_KEYWORD: return ParserRuleContext.CONSTANT_EXPRESSION;
            case CONSTANT_EXPRESSION: return ParserRuleContext.CONSTANT_EXPRESSION_START;
            case XML_NAMESPACE_PREFIX_DECL: return ParserRuleContext.SEMICOLON;
            case NAMED_WORKER_DECL: return ParserRuleContext.WORKER_KEYWORD;
            case WORKER_KEYWORD: return ParserRuleContext.WORKER_NAME;
            case WORKER_NAME: return ParserRuleContext.WORKER_NAME_RHS;
            case FORK_STMT: return ParserRuleContext.FORK_KEYWORD;
            default:
                // Anything not in this table is handled by the overflow table below.
                return getNextRuleInternal(currentCtx, nextLookahead);
        }
    }

    /**
     * Continuation of {@link #getNextRule}: transition table for the contexts not handled
     * by the primary table (split only to keep method size manageable).
     *
     * @param currentCtx Current parser context
     * @param nextLookahead Position of the next token to consider, relative to the position of the original error
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleInternal(ParserRuleContext currentCtx, int nextLookahead) {
        ParserRuleContext parentCtx;
        switch (currentCtx) {
            case FORK_KEYWORD: return ParserRuleContext.OPEN_BRACE;
            case TRAP_KEYWORD: return ParserRuleContext.EXPRESSION;
            case LIST_CONSTRUCTOR: return ParserRuleContext.OPEN_BRACKET;
            case FOREACH_STMT: return ParserRuleContext.FOREACH_KEYWORD;
            case FOREACH_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN;
            case IN_KEYWORD:
                // 'in' terminates a let-expression var-decl; pop that scope before the RHS.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.LET_EXPR_LET_VAR_DECL) {
                    endContext();
                }
                return ParserRuleContext.EXPRESSION;
            case TYPE_CAST: return ParserRuleContext.LT;
            case PIPE:
                // '|' is an alternative-wait separator inside wait expressions,
                // otherwise a union type separator.
                if (getParentContext() == ParserRuleContext.ALTERNATE_WAIT_EXPRS) {
                    return ParserRuleContext.EXPRESSION;
                }
                return ParserRuleContext.TYPE_DESCRIPTOR;
            case TABLE_CONSTRUCTOR: return ParserRuleContext.OPEN_BRACKET;
            case TABLE_KEYWORD:
                // 'table' starts a table type in type position, else a table constructor.
                if (isInTypeDescContext()) {
                    return ParserRuleContext.ROW_TYPE_PARAM;
                }
                return ParserRuleContext.TABLE_KEYWORD_RHS;
            case KEY_SPECIFIER: return ParserRuleContext.KEY_KEYWORD;
            case KEY_KEYWORD:
                if (isInTypeDescContext()) {
                    return ParserRuleContext.KEY_CONSTRAINTS_RHS;
                }
                return ParserRuleContext.OPEN_PARENTHESIS;
            case ERROR_KEYWORD:
                // 'error' is a type with an optional type-param in type position,
                // otherwise an error constructor call.
                if (isInTypeDescContext()) {
                    return ParserRuleContext.ERROR_TYPE_PARAM_START;
                }
                return ParserRuleContext.ARG_LIST_START;
            case ERROR_TYPE_PARAM_START: return ParserRuleContext.ERROR_TYPE_PARAMS;
            case LET_EXPRESSION: return ParserRuleContext.LET_KEYWORD;
            case LET_KEYWORD:
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.QUERY_EXPRESSION) {
                    return ParserRuleContext.LET_CLAUSE_LET_VAR_DECL;
                } else if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
                    // A previous let-clause var-decl ends where the next 'let' begins.
                    endContext();
                    return ParserRuleContext.LET_CLAUSE_LET_VAR_DECL;
                }
                return ParserRuleContext.LET_EXPR_LET_VAR_DECL;
            case LET_EXPR_LET_VAR_DECL:
            case LET_CLAUSE_LET_VAR_DECL:
                return ParserRuleContext.LET_VAR_DECL_START;
            case STREAM_KEYWORD:
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION) {
                    return ParserRuleContext.QUERY_EXPRESSION;
                }
                return ParserRuleContext.LT;
            case END_OF_TYPE_DESC: return getNextRuleForTypeDescriptor();
            case TYPED_BINDING_PATTERN: return ParserRuleContext.TYPE_DESCRIPTOR;
            case CAPTURE_BINDING_PATTERN: return ParserRuleContext.VARIABLE_NAME;
            case REST_BINDING_PATTERN: return ParserRuleContext.ELLIPSIS;
            case LIST_BINDING_PATTERN: return ParserRuleContext.OPEN_BRACKET;
            case MAPPING_BINDING_PATTERN: return ParserRuleContext.OPEN_BRACE;
            case FIELD_BINDING_PATTERN: return ParserRuleContext.VARIABLE_NAME;
            case PARAMETERIZED_TYPE: return ParserRuleContext.LT;
            case NEW_KEYWORD: return ParserRuleContext.NEW_KEYWORD_RHS;
            case LT: return getNextRuleForLt();
            case GT: return getNextRuleForGt(nextLookahead);
            case TEMPLATE_END: return ParserRuleContext.EXPRESSION_RHS;
            case TEMPLATE_START: return ParserRuleContext.TEMPLATE_BODY;
            case TEMPLATE_BODY: return ParserRuleContext.TEMPLATE_MEMBER;
            case TEMPLATE_STRING: return ParserRuleContext.TEMPLATE_STRING_RHS;
            case INTERPOLATION_START_TOKEN: return ParserRuleContext.EXPRESSION;
            case XML_KEYWORD:
            case STRING_KEYWORD:
                return ParserRuleContext.TEMPLATE_START;
            case ARG_LIST_START: return ParserRuleContext.ARG_LIST;
            case ARG_LIST_END:
                endContext();
                return ParserRuleContext.EXPRESSION_RHS;
            case ARG_LIST: return ParserRuleContext.ARG_START_OR_ARG_LIST_END;
            // Every TYPE_DESC_IN_* wrapper context simply expects a type descriptor next.
            case TYPE_DESC_IN_ANNOTATION_DECL:
            case TYPE_DESC_BEFORE_IDENTIFIER:
            case TYPE_DESC_IN_RECORD_FIELD:
            case TYPE_DESC_IN_PARAM:
            case TYPE_DESC_IN_TYPE_BINDING_PATTERN:
            case TYPE_DESC_IN_TYPE_DEF:
            case TYPE_DESC_IN_ANGLE_BRACKETS:
            case TYPE_DESC_IN_RETURN_TYPE_DESC:
            case TYPE_DESC_IN_EXPRESSION:
            case TYPE_DESC_IN_STREAM_TYPE_DESC:
            case TYPE_DESC_IN_PARENTHESIS:
            case TYPE_DESC_IN_NEW_EXPR:
            case TYPE_DESC_IN_TUPLE:
                return ParserRuleContext.TYPE_DESCRIPTOR;
            // NOTE(review): enum-constant name below contains a typo ('DENTIFIER');
            // fixing it requires touching the enum declaration elsewhere — separate PR.
            case VAR_DECL_STARTED_WITH_DENTIFIER:
                startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
                return ParserRuleContext.TYPEDESC_RHS;
            case INFERRED_TYPE_DESC: return ParserRuleContext.GT;
            case ROW_TYPE_PARAM: return ParserRuleContext.LT;
            case PARENTHESISED_TYPE_DESC_START: return ParserRuleContext.TYPE_DESC_IN_PARENTHESIS;
            case SELECT_CLAUSE: return ParserRuleContext.SELECT_KEYWORD;
            case SELECT_KEYWORD:
                // 'select' terminates the query pipeline; pop the query scope, plus the
                // extra let-clause scope when one is still open.
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.QUERY_EXPRESSION) {
                    endContext();
                }
                if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
                    endContext();
                    endContext();
                }
                return ParserRuleContext.EXPRESSION;
            case WHERE_CLAUSE: return ParserRuleContext.WHERE_KEYWORD;
            case WHERE_KEYWORD:
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
                    endContext();
                }
                return ParserRuleContext.EXPRESSION;
            case FROM_CLAUSE: return ParserRuleContext.FROM_KEYWORD;
            case FROM_KEYWORD:
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
                    endContext();
                }
                return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN;
            case LET_CLAUSE: return ParserRuleContext.LET_KEYWORD;
            case QUERY_EXPRESSION: return ParserRuleContext.FROM_CLAUSE;
            case TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION:
                return ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START;
            case BITWISE_AND_OPERATOR: return ParserRuleContext.TYPE_DESCRIPTOR;
            case EXPR_FUNC_BODY_START: return ParserRuleContext.EXPRESSION;
            case AMBIGUOUS_FUNC_TYPE_DESC_RHS:
                // Ambiguity resolved as a var-decl whose type is the func-type-desc just parsed:
                // swap the current scope for a var-decl + type-binding-pattern pair.
                endContext();
                startContext(ParserRuleContext.VAR_DECL_STMT);
                startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
                return ParserRuleContext.TYPEDESC_RHS;
            case FUNC_TYPE_DESC_END:
                endContext();
                return ParserRuleContext.TYPEDESC_RHS;
            case IMPLICIT_ANON_FUNC_PARAM: return ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS;
            case EXPLICIT_ANON_FUNC_EXPR_BODY_START:
                endContext();
                return ParserRuleContext.EXPR_FUNC_BODY_START;
            case OBJECT_MEMBER: return ParserRuleContext.OBJECT_MEMBER_START;
            case ANNOTATION_END: return getNextRuleForAnnotationEnd(nextLookahead);
            case START_KEYWORD: return ParserRuleContext.EXPRESSION;
            case FLUSH_KEYWORD: return ParserRuleContext.OPTIONAL_PEER_WORKER;
            case PEER_WORKER_NAME:
            case DEFAULT_KEYWORD:
                if (getParentContext() == ParserRuleContext.MULTI_RECEIVE_WORKERS) {
                    return ParserRuleContext.RECEIVE_FIELD_END;
                }
                return ParserRuleContext.EXPRESSION_RHS;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return ParserRuleContext.SIGNED_INT_OR_FLOAT_RHS;
            case SIGNED_INT_OR_FLOAT_RHS: return getNextRuleForExpr();
            case TUPLE_TYPE_DESC_START: return ParserRuleContext.TYPE_DESC_IN_TUPLE;
            case TYPE_DESC_IN_TUPLE_RHS: return ParserRuleContext.OPEN_BRACKET;
            // NOTE(review): this context transitions to itself — confirm this is intentional
            // (it makes the recovery loop rely on the lookahead to terminate).
            case WORKER_NAME_OR_METHOD_NAME: return ParserRuleContext.WORKER_NAME_OR_METHOD_NAME;
            case DEFAULT_WORKER_NAME_IN_ASYNC_SEND: return ParserRuleContext.SEMICOLON;
            case SYNC_SEND_TOKEN: return ParserRuleContext.PEER_WORKER_NAME;
            case LEFT_ARROW_TOKEN: return ParserRuleContext.RECEIVE_WORKERS;
            case MULTI_RECEIVE_WORKERS: return ParserRuleContext.OPEN_BRACE;
            case RECEIVE_FIELD_NAME: return ParserRuleContext.COLON;
            case WAIT_KEYWORD: return ParserRuleContext.WAIT_KEYWORD_RHS;
            case WAIT_FIELD_NAME: return ParserRuleContext.WAIT_FIELD_NAME_RHS;
            case ALTERNATE_WAIT_EXPR_LIST_END: return getNextRuleForWaitExprListEnd();
            case MULTI_WAIT_FIELDS: return ParserRuleContext.OPEN_BRACE;
            case ALTERNATE_WAIT_EXPRS: return ParserRuleContext.EXPRESSION;
            case ANNOT_CHAINING_TOKEN: return ParserRuleContext.ANNOT_TAG_REFERENCE;
            case DO_CLAUSE: return ParserRuleContext.DO_KEYWORD;
            case DO_KEYWORD: return ParserRuleContext.OPEN_BRACE;
            case LET_CLAUSE_END:
                endContext();
                return ParserRuleContext.QUERY_PIPELINE_RHS;
            case MEMBER_ACCESS_KEY_EXPR: return ParserRuleContext.OPEN_BRACKET;
            case OPTIONAL_CHAINING_TOKEN: return ParserRuleContext.FIELD_OR_FUNC_NAME;
            case CONDITIONAL_EXPRESSION: return ParserRuleContext.QUESTION_MARK;
            case TRANSACTION_STMT: return ParserRuleContext.TRANSACTION_KEYWORD;
            case RETRY_STMT: return ParserRuleContext.RETRY_KEYWORD;
            case ROLLBACK_STMT: return ParserRuleContext.ROLLBACK_KEYWORD;
            case TRANSACTION_KEYWORD: return ParserRuleContext.BLOCK_STMT;
            case COMMIT_KEYWORD: return ParserRuleContext.EXPRESSION_RHS;
            case ROLLBACK_KEYWORD: return ParserRuleContext.ROLLBACK_RHS;
            case RETRY_KEYWORD: return ParserRuleContext.RETRY_KEYWORD_RHS;
            case TRANSACTIONAL_KEYWORD: return ParserRuleContext.EXPRESSION_RHS;
            case MODULE_ENUM_DECLARATION: return ParserRuleContext.ENUM_KEYWORD;
            case ENUM_KEYWORD: return ParserRuleContext.MODULE_ENUM_NAME;
            case MODULE_ENUM_NAME: return ParserRuleContext.OPEN_BRACE;
            case ENUM_MEMBER_LIST: return ParserRuleContext.ENUM_MEMBER_START;
            case ENUM_MEMBER_NAME: return ParserRuleContext.ENUM_MEMBER_INTERNAL_RHS;
            default:
                throw new IllegalStateException("cannot find the next rule for: " + currentCtx);
        }
    }

    /**
     * Push a new scope onto the context stack when entering a rule that owns one
     * (statements, bodies, binding patterns, the TYPE_DESC_IN_* wrappers, etc.), and
     * switch (rather than push) the scope for the contexts listed in the second table.
     *
     * @param currentCtx Context being entered
     */
    private void startContextIfRequired(ParserRuleContext currentCtx) {
        switch (currentCtx) {
            case COMP_UNIT:
            case FUNC_DEF_OR_FUNC_TYPE:
            case
            ANON_FUNC_EXPRESSION:
            case FUNC_DEF:
            case FUNC_TYPE_DESC:
            case EXTERNAL_FUNC_BODY:
            case FUNC_BODY_BLOCK:
            case STATEMENT:
            case STATEMENT_WITHOUT_ANNOTS:
            case VAR_DECL_STMT:
            case ASSIGNMENT_STMT:
            case REQUIRED_PARAM:
            case DEFAULTABLE_PARAM:
            case REST_PARAM:
            case MODULE_TYPE_DEFINITION:
            case RECORD_FIELD:
            case RECORD_TYPE_DESCRIPTOR:
            case OBJECT_TYPE_DESCRIPTOR:
            case ARG_LIST:
            case OBJECT_FUNC_OR_FIELD:
            case IF_BLOCK:
            case BLOCK_STMT:
            case WHILE_BLOCK:
            case PANIC_STMT:
            case CALL_STMT:
            case IMPORT_DECL:
            case CONTINUE_STATEMENT:
            case BREAK_STATEMENT:
            case RETURN_STMT:
            case COMPUTED_FIELD_NAME:
            case LISTENERS_LIST:
            case SERVICE_DECL:
            case LISTENER_DECL:
            case CONSTANT_DECL:
            case NIL_TYPE_DESCRIPTOR:
            case COMPOUND_ASSIGNMENT_STMT:
            case OPTIONAL_TYPE_DESCRIPTOR:
            case ARRAY_TYPE_DESCRIPTOR:
            case ANNOTATIONS:
            case VARIABLE_REF:
            case TYPE_REFERENCE:
            case ANNOT_REFERENCE:
            case ANNOT_TAG_REFERENCE:
            case MAPPING_CONSTRUCTOR:
            case LOCAL_TYPE_DEFINITION_STMT:
            case EXPRESSION_STATEMENT:
            case NIL_LITERAL:
            case LOCK_STMT:
            case ANNOTATION_DECL:
            case ANNOT_ATTACH_POINTS_LIST:
            case XML_NAMESPACE_DECLARATION:
            case CONSTANT_EXPRESSION:
            case NAMED_WORKER_DECL:
            case FORK_STMT:
            case FOREACH_STMT:
            case LIST_CONSTRUCTOR:
            case TYPE_CAST:
            case KEY_SPECIFIER:
            case LET_EXPR_LET_VAR_DECL:
            case LET_CLAUSE_LET_VAR_DECL:
            case ROW_TYPE_PARAM:
            case TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION:
            case OBJECT_MEMBER:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case FIELD_BINDING_PATTERN:
            case TYPED_BINDING_PATTERN:
            case CAPTURE_BINDING_PATTERN:
            case MULTI_RECEIVE_WORKERS:
            case MULTI_WAIT_FIELDS:
            case ALTERNATE_WAIT_EXPRS:
            case DO_CLAUSE:
            case MEMBER_ACCESS_KEY_EXPR:
            case CONDITIONAL_EXPRESSION:
            case TRANSACTION_STMT:
            case RETRY_STMT:
            case ROLLBACK_STMT:
            case MODULE_ENUM_DECLARATION:
            case ENUM_MEMBER_LIST:
            case TYPE_DESC_IN_ANNOTATION_DECL:
            case TYPE_DESC_BEFORE_IDENTIFIER:
            case TYPE_DESC_IN_RECORD_FIELD:
            case TYPE_DESC_IN_PARAM:
            case TYPE_DESC_IN_TYPE_BINDING_PATTERN:
            case TYPE_DESC_IN_TYPE_DEF:
            case TYPE_DESC_IN_ANGLE_BRACKETS:
            case TYPE_DESC_IN_RETURN_TYPE_DESC:
            case TYPE_DESC_IN_EXPRESSION:
            case TYPE_DESC_IN_STREAM_TYPE_DESC:
            case TYPE_DESC_IN_PARENTHESIS:
            case TYPE_DESC_IN_NEW_EXPR:
            case TYPE_DESC_IN_TUPLE:
                startContext(currentCtx);
                break;
            default:
                break;
        }

        // These two replace the current scope instead of nesting a new one.
        switch (currentCtx) {
            case TABLE_CONSTRUCTOR:
            case QUERY_EXPRESSION:
                switchContext(currentCtx);
                break;
            default:
                break;
        }
    }

    /**
     * Get the next parser context to visit after a {@link ParserRuleContext#CLOSE_PARENTHESIS}.
     * <p>
     * NOTE(review): the method name misspells "Parenthesis"; callers in this file reference
     * the current spelling, so a rename should go in as a separate PR.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForCloseParenthsis() {
        ParserRuleContext parentCtx;
        parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.PARAM_LIST) {
            endContext(); // end the param-list scope
            return ParserRuleContext.FUNC_OPTIONAL_RETURNS;
        } else if (isParameter(parentCtx)) {
            // Close both the individual parameter scope and the enclosing param-list scope.
            endContext();
            endContext();
            return ParserRuleContext.FUNC_OPTIONAL_RETURNS;
        } else if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR) {
            endContext();
            return ParserRuleContext.TYPEDESC_RHS;
        } else if (parentCtx == ParserRuleContext.NIL_LITERAL) {
            endContext();
            return getNextRuleForExpr();
        } else if (parentCtx == ParserRuleContext.KEY_SPECIFIER) {
            endContext();
            if (isInTypeDescContext()) {
                return ParserRuleContext.TYPEDESC_RHS;
            }
            return ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS;
        } else if (isInTypeDescContext()) {
            return ParserRuleContext.TYPEDESC_RHS;
        } else if (parentCtx == ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS) {
            endContext();
            return ParserRuleContext.INFER_PARAM_END_OR_PARENTHESIS_END;
        }
        return ParserRuleContext.EXPRESSION_RHS;
    }

    /**
     * Get the next parser context to visit after an {@link ParserRuleContext#OPEN_PARENTHESIS},
     * which may start a param list, a nil type/literal, a key specifier, or a braced expression.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForOpenParenthesis() {
        ParserRuleContext parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.EXPRESSION_STATEMENT) {
            return ParserRuleContext.EXPRESSION_STATEMENT_START;
        } else if (isStatement(parentCtx) || isExpressionContext(parentCtx) ||
                parentCtx == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) {
            return ParserRuleContext.EXPRESSION;
        } else if (parentCtx == ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE ||
                parentCtx == ParserRuleContext.FUNC_TYPE_DESC ||
                parentCtx == ParserRuleContext.FUNC_DEF ||
                parentCtx == ParserRuleContext.ANON_FUNC_EXPRESSION) {
            // '(' right after 'function': this is a parameter list.
            startContext(ParserRuleContext.PARAM_LIST);
            return ParserRuleContext.PARAM_LIST;
        } else if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR ||
                parentCtx == ParserRuleContext.NIL_LITERAL) {
            return ParserRuleContext.CLOSE_PARENTHESIS;
        } else if (parentCtx == ParserRuleContext.KEY_SPECIFIER) {
            return ParserRuleContext.KEY_SPECIFIER_RHS;
        } else if (isInTypeDescContext()) {
            startContext(ParserRuleContext.KEY_SPECIFIER);
            return ParserRuleContext.KEY_SPECIFIER_RHS;
        } else if (isParameter(parentCtx)) {
            // NOTE(review): this branch returns the same value as the final fall-back;
            // presumably kept for readability/future divergence — confirm before simplifying.
            return ParserRuleContext.EXPRESSION;
        }
        return ParserRuleContext.EXPRESSION;
    }

    /**
     * Get the next parser context to visit after an {@link ParserRuleContext#OPEN_BRACE},
     * based on what kind of construct the brace opens.
     *
     * @param nextLookahead Position of the next token to consider, relative to the position of the original error
     *                      (currently unused here)
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForOpenBrace(int nextLookahead) {
        ParserRuleContext parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.LISTENERS_LIST) {
            // The listener list ends where the service body begins.
            endContext();
        }
        switch (parentCtx) {
            case OBJECT_TYPE_DESCRIPTOR: return ParserRuleContext.OBJECT_MEMBER;
            case RECORD_TYPE_DESCRIPTOR: return ParserRuleContext.RECORD_FIELD;
            case MAPPING_CONSTRUCTOR: return ParserRuleContext.FIRST_MAPPING_FIELD;
            case FORK_STMT: return ParserRuleContext.NAMED_WORKER_DECL;
            case MULTI_RECEIVE_WORKERS: return ParserRuleContext.RECEIVE_FIELD;
            case MULTI_WAIT_FIELDS: return ParserRuleContext.WAIT_FIELD_NAME;
            case MODULE_ENUM_DECLARATION: return ParserRuleContext.ENUM_MEMBER_LIST;
            case MAPPING_BINDING_PATTERN: return ParserRuleContext.MAPPING_BINDING_PATTERN_CONTENTS;
            default: return ParserRuleContext.STATEMENT;
        }
    }

    /**
     * Check whether the given context is one in which an expression may occur.
     *
     * @param ctx Context to check
     * @return {@code true} if an expression can appear directly inside {@code ctx}
     */
    private boolean isExpressionContext(ParserRuleContext ctx) {
        switch (ctx) {
            case LISTENERS_LIST:
            case MAPPING_CONSTRUCTOR:
            case COMPUTED_FIELD_NAME:
            case LIST_CONSTRUCTOR:
            case INTERPOLATION:
            case ARG_LIST:
            case LET_EXPR_LET_VAR_DECL:
            case LET_CLAUSE_LET_VAR_DECL:
            case TABLE_CONSTRUCTOR:
            case QUERY_EXPRESSION:
            case TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION:
                return true;
            default:
                return false;
        }
    }

    /**
     * Get the next parser context to visit after a parameter type
     * ({@link ParserRuleContext#AFTER_PARAMETER_TYPE}).
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForParamType() {
        ParserRuleContext parentCtx;
        parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.REQUIRED_PARAM || parentCtx == ParserRuleContext.DEFAULTABLE_PARAM) {
            return ParserRuleContext.VARIABLE_NAME;
        } else if (parentCtx == ParserRuleContext.REST_PARAM) {
            return ParserRuleContext.ELLIPSIS;
        } else {
            throw new IllegalStateException();
        }
    }

    /**
     * Get the next parser context to visit after a {@link ParserRuleContext#COMMA},
     * depending on what kind of list the comma separates.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForComma() {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case PARAM_LIST:
            case REQUIRED_PARAM:
            case DEFAULTABLE_PARAM:
            case REST_PARAM:
                // Close the finished parameter's scope and expect another of the same kind.
                endContext();
                return parentCtx;
            case ARG_LIST: return ParserRuleContext.ARG_START;
            case MAPPING_CONSTRUCTOR: return ParserRuleContext.MAPPING_FIELD;
            case LISTENERS_LIST:
            case LIST_CONSTRUCTOR:
                return ParserRuleContext.EXPRESSION;
            case ANNOT_ATTACH_POINTS_LIST: return ParserRuleContext.ATTACH_POINT;
            case TABLE_CONSTRUCTOR: return ParserRuleContext.MAPPING_CONSTRUCTOR;
            case KEY_SPECIFIER: return ParserRuleContext.VARIABLE_NAME;
            case LET_EXPR_LET_VAR_DECL:
            case LET_CLAUSE_LET_VAR_DECL:
                return ParserRuleContext.LET_VAR_DECL_START;
            case TYPE_DESC_IN_STREAM_TYPE_DESC: return ParserRuleContext.TYPE_DESCRIPTOR;
            case BRACED_EXPR_OR_ANON_FUNC_PARAMS: return ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM;
            case TYPE_DESC_IN_TUPLE: return ParserRuleContext.TYPE_DESCRIPTOR;
            case LIST_BINDING_PATTERN: return ParserRuleContext.LIST_BINDING_PATTERN_CONTENTS;
            case FIELD_BINDING_PATTERN:
                endContext();
                // fall through: after closing the field scope, continue as a mapping binding pattern
            case MAPPING_BINDING_PATTERN:
                return ParserRuleContext.MAPPING_BINDING_PATTERN_CONTENTS;
            case MULTI_RECEIVE_WORKERS: return ParserRuleContext.RECEIVE_FIELD;
            case MULTI_WAIT_FIELDS: return ParserRuleContext.WAIT_FIELD_NAME;
            case ENUM_MEMBER_LIST: return ParserRuleContext.ENUM_MEMBER_START;
            case MEMBER_ACCESS_KEY_EXPR: return ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END;
            default:
                throw new IllegalStateException(parentCtx.toString());
        }
    }

    /**
     * Get the next parser context to visit after a type descriptor.
     * <p>
     * Each TYPE_DESC_IN_* wrapper scope is popped here; when type-desc scopes are nested,
     * control returns to the outer type descriptor's RHS instead.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForTypeDescriptor() {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case TYPE_DESC_IN_ANNOTATION_DECL:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.ANNOTATION_TAG;
            case TYPE_DESC_BEFORE_IDENTIFIER:
            case TYPE_DESC_IN_RECORD_FIELD:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.VARIABLE_NAME;
            case TYPE_DESC_IN_TYPE_BINDING_PATTERN:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                if (getParentContext() == ParserRuleContext.FOREACH_STMT) {
                    return ParserRuleContext.BINDING_PATTERN;
                }
                return ParserRuleContext.VARIABLE_NAME;
            case TYPE_DESC_IN_PARAM:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.AFTER_PARAMETER_TYPE;
            case TYPE_DESC_IN_TYPE_DEF:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.SEMICOLON;
            case TYPE_DESC_IN_ANGLE_BRACKETS:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.GT;
            case TYPE_DESC_IN_RETURN_TYPE_DESC:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                // What follows a return-type depends on what kind of function owns it.
                parentCtx = getParentContext();
                switch (parentCtx) {
                    case FUNC_TYPE_DESC:
                        endContext();
                        return ParserRuleContext.TYPEDESC_RHS;
                    case FUNC_DEF_OR_FUNC_TYPE:
                        return ParserRuleContext.FUNC_BODY_OR_TYPE_DESC_RHS;
                    case FUNC_DEF:
                        return ParserRuleContext.FUNC_BODY;
                    case ANON_FUNC_EXPRESSION:
                        return ParserRuleContext.ANON_FUNC_BODY;
                    case NAMED_WORKER_DECL:
                        return ParserRuleContext.BLOCK_STMT;
                    default:
                        throw new IllegalStateException(parentCtx.toString());
                }
            case TYPE_DESC_IN_EXPRESSION:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.EXPRESSION_RHS;
            case COMP_UNIT:
                /*
                 * Fact 1:
                 * ------
                 * FUNC_DEF_OR_FUNC_TYPE is only possible for module level construct or object member
                 * that starts with 'function' keyword. However, until the end of func-signature,
                 * we don't know whether this is a func-def or a function type.
                 * Hence a var-decl-stmt context is not started until this point.
                 *
                 * Fact 2:
                 * ------
                 * We reach here for END_OF_TYPE_DESC context. That means we are going to end the
                 * func-type-desc.
                 */
                startContext(ParserRuleContext.VAR_DECL_STMT);
                return ParserRuleContext.VARIABLE_NAME;
            case OBJECT_MEMBER: return ParserRuleContext.VARIABLE_NAME;
            case ANNOTATION_DECL: return ParserRuleContext.IDENTIFIER;
            case TYPE_DESC_IN_STREAM_TYPE_DESC: return ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS;
            case TYPE_DESC_IN_PARENTHESIS:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.CLOSE_PARENTHESIS;
            case TYPE_DESC_IN_NEW_EXPR:
                endContext();
                if (isInTypeDescContext()) {
                    return ParserRuleContext.TYPEDESC_RHS;
                }
                return ParserRuleContext.ARG_LIST_START;
            case TYPE_DESC_IN_TUPLE: return ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS;
            default: return ParserRuleContext.EXPRESSION_RHS;
        }
    }

    /**
     * Check whether the current (top-of-stack) context is one of the TYPE_DESC_IN_* wrappers,
     * i.e. whether the parser is currently inside a type descriptor.
     *
     * @return {@code true} if the parent context is a type-descriptor context
     */
    private boolean isInTypeDescContext() {
        switch (getParentContext()) {
            case TYPE_DESC_IN_ANNOTATION_DECL:
            case TYPE_DESC_BEFORE_IDENTIFIER:
            case TYPE_DESC_IN_RECORD_FIELD:
            case TYPE_DESC_IN_PARAM:
            case TYPE_DESC_IN_TYPE_BINDING_PATTERN:
            case TYPE_DESC_IN_TYPE_DEF:
            case TYPE_DESC_IN_ANGLE_BRACKETS:
            case TYPE_DESC_IN_RETURN_TYPE_DESC:
            case TYPE_DESC_IN_EXPRESSION:
            case TYPE_DESC_IN_STREAM_TYPE_DESC:
            case TYPE_DESC_IN_PARENTHESIS:
            case TYPE_DESC_IN_NEW_EXPR:
            case TYPE_DESC_IN_TUPLE:
                return true;
            default:
                return false;
        }
    }

    /**
     * Get the next parser context to visit after an {@link ParserRuleContext#ASSIGN_OP}.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForEqualOp() {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case EXTERNAL_FUNC_BODY:
                return ParserRuleContext.EXTERNAL_KEYWORD;
            case REQUIRED_PARAM:
            case DEFAULTABLE_PARAM:
            case RECORD_FIELD:
            case ARG_LIST:
            case OBJECT_MEMBER:
            case LISTENER_DECL:
            case CONSTANT_DECL:
            case LET_EXPR_LET_VAR_DECL:
            case LET_CLAUSE_LET_VAR_DECL:
            case ENUM_MEMBER_LIST:
                return ParserRuleContext.EXPRESSION;
            default:
                if (parentCtx == ParserRuleContext.STMT_START_WITH_IDENTIFIER) {
                    // '=' after a bare identifier resolves the ambiguity to an assignment/var-decl.
                    switchContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
                    return ParserRuleContext.EXPRESSION;
                }
                if (isStatement(parentCtx)) {
                    return ParserRuleContext.EXPRESSION;
                }
                throw new IllegalStateException("equal op cannot exist in a " + parentCtx);
        }
    }

    /**
     * Get the next parser context to visit after a {@link ParserRuleContext#CLOSE_BRACE}.
     *
     * @param nextLookahead Position of the next token to consider, relative to the position of the original error
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForCloseBrace(int nextLookahead) {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case FUNC_BODY_BLOCK:
                endContext(); // end the func-body-block scope
                STToken nextToken = this.tokenReader.peek(nextLookahead);
                if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                    return ParserRuleContext.EOF;
                }
                parentCtx = getParentContext();
                switch (parentCtx) {
                    case SERVICE_DECL:
                        return ParserRuleContext.RESOURCE_DEF;
                    case OBJECT_MEMBER:
                        return ParserRuleContext.OBJECT_MEMBER_START;
                    case COMP_UNIT:
                        return ParserRuleContext.TOP_LEVEL_NODE;
                    case FUNC_DEF:
                    case FUNC_DEF_OR_FUNC_TYPE:
                        endContext();
                        return ParserRuleContext.TOP_LEVEL_NODE;
                    case ANON_FUNC_EXPRESSION:
                    default:
                        // fall through: anonymous functions (and anything else) resume the expression
                        endContext();
                        return ParserRuleContext.EXPRESSION_RHS;
                }
            case SERVICE_DECL:
                endContext();
                nextToken = this.tokenReader.peek(nextLookahead);
                if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                    return ParserRuleContext.EOF;
                }
                return ParserRuleContext.TOP_LEVEL_NODE;
            case OBJECT_MEMBER:
                endContext();
                // fall through: close the enclosing object/record type descriptor as well
            case RECORD_TYPE_DESCRIPTOR:
            case OBJECT_TYPE_DESCRIPTOR:
                endContext();
                return ParserRuleContext.TYPEDESC_RHS;
            case BLOCK_STMT:
                endContext();
                parentCtx = getParentContext();
                switch (parentCtx) {
                    case LOCK_STMT:
                    case FOREACH_STMT:
                    case WHILE_BLOCK:
                    case RETRY_STMT:
                        endContext();
                        return ParserRuleContext.STATEMENT;
                    case IF_BLOCK:
                        endContext();
                        return ParserRuleContext.ELSE_BLOCK;
                    case TRANSACTION_STMT:
                        endContext();
                        // A transaction block may itself be the body of a retry statement.
                        parentCtx = getParentContext();
                        if (parentCtx == ParserRuleContext.RETRY_STMT) {
                            endContext();
                        }
                        return ParserRuleContext.STATEMENT;
                    case NAMED_WORKER_DECL:
                        endContext();
                        parentCtx = getParentContext();
                        if (parentCtx == ParserRuleContext.FORK_STMT) {
                            // Inside a fork, a '}' next means the fork itself is closing.
                            nextToken = this.tokenReader.peek(nextLookahead);
                            switch (nextToken.kind) {
                                case CLOSE_BRACE_TOKEN:
                                    return ParserRuleContext.CLOSE_BRACE;
                                default:
                                    return ParserRuleContext.STATEMENT;
                            }
                        } else {
                            return ParserRuleContext.STATEMENT;
                        }
                    default:
                        return ParserRuleContext.STATEMENT;
                }
            case MAPPING_CONSTRUCTOR:
                endContext();
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.TABLE_CONSTRUCTOR) {
                    return ParserRuleContext.TABLE_ROW_END;
                }
                if (parentCtx == ParserRuleContext.ANNOTATIONS) {
                    return ParserRuleContext.ANNOTATION_END;
                }
                return getNextRuleForExpr();
            case FIELD_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
                endContext();
                return getNextRuleForTypedBindingPattern();
            case FORK_STMT:
                endContext();
                return ParserRuleContext.STATEMENT;
            case INTERPOLATION:
                endContext();
                return ParserRuleContext.TEMPLATE_MEMBER;
            case MULTI_RECEIVE_WORKERS:
            case MULTI_WAIT_FIELDS:
            case DO_CLAUSE:
                endContext();
                return ParserRuleContext.EXPRESSION_RHS;
            case ENUM_MEMBER_LIST:
                // Pop both the member-list scope and the enum-declaration scope.
                endContext();
                endContext();
                return ParserRuleContext.TOP_LEVEL_NODE;
            default:
                throw new IllegalStateException("found close-brace in: " + parentCtx);
        }
    }

    private ParserRuleContext getNextRuleForAnnotationEnd(int nextLookahead) {
        ParserRuleContext parentCtx;
        STToken nextToken;
        nextToken = this.tokenReader.peek(nextLookahead);
        if (nextToken.kind == SyntaxKind.AT_TOKEN) {
            // Another '@' follows: more annotations in the same list.
            return ParserRuleContext.AT;
        }
        endContext();
        parentCtx = getParentContext();
        switch (parentCtx)
{ case COMP_UNIT: return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA; case FUNC_DEF: case FUNC_TYPE_DESC: case FUNC_DEF_OR_FUNC_TYPE: case ANON_FUNC_EXPRESSION: return ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC; case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN; case RECORD_FIELD: return ParserRuleContext.RECORD_FIELD_WITHOUT_METADATA; case OBJECT_MEMBER: return ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA; case SERVICE_DECL: return ParserRuleContext.RESOURCE_DEF; case FUNC_BODY_BLOCK: return ParserRuleContext.STATEMENT_WITHOUT_ANNOTS; case EXTERNAL_FUNC_BODY: return ParserRuleContext.EXTERNAL_KEYWORD; case TYPE_CAST: return ParserRuleContext.TYPE_CAST_PARAM_RHS; case ENUM_MEMBER_LIST: return ParserRuleContext.ENUM_MEMBER_NAME; default: if (isParameter(parentCtx)) { return ParserRuleContext.REQUIRED_PARAM; } return ParserRuleContext.EXPRESSION; } } /** * Get the next parser context to visit after a variable/parameter name. 
*
     * @return Next parser context
     */
    /**
     * Get the next parser context to visit after a semicolon token.
     *
     * @param nextLookahead Position of the next token to consider, relative to the position of the original error
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForSemicolon(int nextLookahead) {
        STToken nextToken;
        ParserRuleContext parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.EXTERNAL_FUNC_BODY) {
            // Two contexts to unwind: the external body and its enclosing context.
            endContext();
            endContext();
            nextToken = this.tokenReader.peek(nextLookahead);
            if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                return ParserRuleContext.EOF;
            }
            return ParserRuleContext.TOP_LEVEL_NODE;
        } else if (parentCtx == ParserRuleContext.QUERY_EXPRESSION) {
            endContext();
            // Re-evaluate against the context that owned the query expression.
            return getNextRuleForSemicolon(nextLookahead);
        } else if (isExpressionContext(parentCtx)) {
            endContext();
            return ParserRuleContext.STATEMENT;
        } else if (parentCtx == ParserRuleContext.VAR_DECL_STMT) {
            endContext();
            parentCtx = getParentContext();
            if (parentCtx == ParserRuleContext.COMP_UNIT) {
                return ParserRuleContext.TOP_LEVEL_NODE;
            }
            return ParserRuleContext.STATEMENT;
        } else if (isStatement(parentCtx)) {
            endContext();
            return ParserRuleContext.STATEMENT;
        } else if (parentCtx == ParserRuleContext.RECORD_FIELD) {
            endContext();
            return ParserRuleContext.RECORD_FIELD_OR_RECORD_END;
        } else if (parentCtx == ParserRuleContext.MODULE_TYPE_DEFINITION ||
                parentCtx == ParserRuleContext.LISTENER_DECL ||
                parentCtx == ParserRuleContext.CONSTANT_DECL ||
                parentCtx == ParserRuleContext.ANNOTATION_DECL ||
                parentCtx == ParserRuleContext.XML_NAMESPACE_DECLARATION) {
            endContext();
            nextToken = this.tokenReader.peek(nextLookahead);
            if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                return ParserRuleContext.EOF;
            }
            return ParserRuleContext.TOP_LEVEL_NODE;
        } else if (parentCtx == ParserRuleContext.OBJECT_MEMBER) {
            if (isEndOfObjectTypeNode(nextLookahead)) {
                endContext();
                return ParserRuleContext.CLOSE_BRACE;
            }
            return ParserRuleContext.OBJECT_MEMBER_START;
        } else if (parentCtx == ParserRuleContext.IMPORT_DECL) {
            endContext();
            nextToken = this.tokenReader.peek(nextLookahead);
            if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                return ParserRuleContext.EOF;
            }
            return ParserRuleContext.TOP_LEVEL_NODE;
        } else if (parentCtx == ParserRuleContext.ANNOT_ATTACH_POINTS_LIST) {
            // Two contexts to unwind: the attach-points list and its enclosing context.
            endContext();
            endContext();
            nextToken = this.tokenReader.peek(nextLookahead);
            if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                return ParserRuleContext.EOF;
            }
            return ParserRuleContext.TOP_LEVEL_NODE;
        } else if (parentCtx == ParserRuleContext.FUNC_DEF || parentCtx == ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE) {
            endContext();
            nextToken = this.tokenReader.peek(nextLookahead);
            if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
                return ParserRuleContext.EOF;
            }
            return ParserRuleContext.TOP_LEVEL_NODE;
        } else {
            throw new IllegalStateException(parentCtx.toString());
        }
    }

    /**
     * Get the next parser context to visit after a dot token.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForDot() {
        ParserRuleContext parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.IMPORT_DECL) {
            return ParserRuleContext.IMPORT_MODULE_NAME;
        }
        return ParserRuleContext.FIELD_OR_FUNC_NAME;
    }

    /**
     * Get the next parser context to visit after a question-mark token.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForQuestionMark() {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case OPTIONAL_TYPE_DESCRIPTOR:
                endContext();
                return ParserRuleContext.TYPEDESC_RHS;
            case CONDITIONAL_EXPRESSION:
                return ParserRuleContext.EXPRESSION;
            default:
                return ParserRuleContext.SEMICOLON;
        }
    }

    /**
     * Get the next parser context to visit after an open-bracket token.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForOpenBracket() {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case ARRAY_TYPE_DESCRIPTOR:
                return ParserRuleContext.ARRAY_LENGTH;
            case LIST_CONSTRUCTOR:
                return ParserRuleContext.LIST_CONSTRUCTOR_FIRST_MEMBER;
            case TABLE_CONSTRUCTOR:
                return ParserRuleContext.ROW_LIST_RHS;
            case LIST_BINDING_PATTERN:
return ParserRuleContext.LIST_BINDING_PATTERN_CONTENTS; default: if (isInTypeDescContext()) { return ParserRuleContext.TYPE_DESC_IN_TUPLE; } return ParserRuleContext.EXPRESSION; } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForCloseBracket() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case ARRAY_TYPE_DESCRIPTOR: case TYPE_DESC_IN_TUPLE: endContext(); return ParserRuleContext.TYPEDESC_RHS; case COMPUTED_FIELD_NAME: endContext(); return ParserRuleContext.COLON; case LIST_BINDING_PATTERN: endContext(); return getNextRuleForTypedBindingPattern(); case LIST_CONSTRUCTOR: case TABLE_CONSTRUCTOR: case MEMBER_ACCESS_KEY_EXPR: endContext(); return getNextRuleForExpr(); default: return getNextRuleForExpr(); } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForDecimalIntegerLiteral() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case CONSTANT_EXPRESSION: endContext(); return getNextRuleForConstExpr(); case ARRAY_TYPE_DESCRIPTOR: default: return ParserRuleContext.CLOSE_BRACKET; } } private ParserRuleContext getNextRuleForExpr() { ParserRuleContext parentCtx; parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.CONSTANT_EXPRESSION) { endContext(); return getNextRuleForConstExpr(); } return ParserRuleContext.EXPRESSION_RHS; } private ParserRuleContext getNextRuleForConstExpr() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case XML_NAMESPACE_DECLARATION: return ParserRuleContext.XML_NAMESPACE_PREFIX_DECL; default: if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } throw new IllegalStateException(parentCtx.toString()); } } private ParserRuleContext getNextRuleForLt() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case TYPE_CAST: return 
ParserRuleContext.TYPE_CAST_PARAM;
            default:
                return ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS;
        }
    }

    private ParserRuleContext getNextRuleForGt(int nextLookahead) {
        ParserRuleContext parentCtx = getParentContext();
        if (parentCtx == ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC) {
            // '>' closes the stream type parameter list.
            endContext();
            return ParserRuleContext.TYPEDESC_RHS;
        }
        if (isInTypeDescContext()) {
            return ParserRuleContext.TYPEDESC_RHS;
        }
        if (parentCtx == ParserRuleContext.ROW_TYPE_PARAM) {
            endContext();
            return ParserRuleContext.TABLE_TYPE_DESC_RHS;
        } else if (parentCtx == ParserRuleContext.RETRY_STMT) {
            return ParserRuleContext.RETRY_TYPE_PARAM_RHS;
        }
        endContext();
        return ParserRuleContext.EXPRESSION;
    }

    /**
     * Get the next parser context to visit after a typed-binding-pattern.
     *
     * @return Next parser context
     */
    private ParserRuleContext getNextRuleForTypedBindingPattern() {
        ParserRuleContext parentCtx = getParentContext();
        switch (parentCtx) {
            case CAPTURE_BINDING_PATTERN:
            case TYPED_BINDING_PATTERN:
                endContext();
                // Recurse to resolve against the binding pattern's own parent.
                return getNextRuleForTypedBindingPattern();
            case FOREACH_STMT:
                return ParserRuleContext.IN_KEYWORD;
            case LIST_BINDING_PATTERN:
                return ParserRuleContext.LIST_BINDING_PATTERN_END_OR_CONTINUE;
            case FIELD_BINDING_PATTERN:
                endContext();
                return ParserRuleContext.FIELD_BINDING_PATTERN_END_OR_CONTINUE;
            case MAPPING_BINDING_PATTERN:
                return ParserRuleContext.MAPPING_BINDING_PATTERN_END_OR_CONTINUE;
            case REST_BINDING_PATTERN:
                endContext();
                parentCtx = getParentContext();
                if (parentCtx == ParserRuleContext.LIST_BINDING_PATTERN) {
                    return ParserRuleContext.CLOSE_BRACKET;
                }
                return ParserRuleContext.CLOSE_BRACE;
            default:
                throw new IllegalStateException(parentCtx.toString());
        }
    }

    private ParserRuleContext getNextRuleForWaitExprListEnd() {
        endContext();
        return ParserRuleContext.EXPRESSION_RHS;
    }

    /**
     * Check whether the given context is a statement.
     *
     * @param parentCtx Parser context to check
     * @return <code>true</code> if the given context is a statement. <code>false</code> otherwise
     */
    private boolean isStatement(ParserRuleContext parentCtx) {
        switch (parentCtx) {
            case STATEMENT:
            case STATEMENT_WITHOUT_ANNOTS:
            case VAR_DECL_STMT:
            case ASSIGNMENT_STMT:
            case ASSIGNMENT_OR_VAR_DECL_STMT:
            case IF_BLOCK:
            case BLOCK_STMT:
            case WHILE_BLOCK:
            case CALL_STMT:
            case PANIC_STMT:
            case CONTINUE_STATEMENT:
            case BREAK_STATEMENT:
            case RETURN_STMT:
            case COMPOUND_ASSIGNMENT_STMT:
            case LOCAL_TYPE_DEFINITION_STMT:
            case STMT_START_WITH_IDENTIFIER:
            case EXPRESSION_STATEMENT:
            case LOCK_STMT:
            case FORK_STMT:
            case FOREACH_STMT:
            case TRANSACTION_STMT:
            case RETRY_STMT:
            case ROLLBACK_STMT:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the given token refers to a binary operator.
     *
     * @param token Token to check
     * @return <code>true</code> if the given token refers to a binary operator. <code>false</code> otherwise
     */
    private boolean isBinaryOperator(STToken token) {
        switch (token.kind) {
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case SLASH_TOKEN:
            case ASTERISK_TOKEN:
            case GT_TOKEN:
            case LT_TOKEN:
            case DOUBLE_EQUAL_TOKEN:
            case TRIPPLE_EQUAL_TOKEN:
            case LT_EQUAL_TOKEN:
            case GT_EQUAL_TOKEN:
            case NOT_EQUAL_TOKEN:
            case NOT_DOUBLE_EQUAL_TOKEN:
            case BITWISE_AND_TOKEN:
            case BITWISE_XOR_TOKEN:
            case PIPE_TOKEN:
            case LOGICAL_AND_TOKEN:
            case LOGICAL_OR_TOKEN:
            case DOUBLE_LT_TOKEN:
            case DOUBLE_GT_TOKEN:
            case TRIPPLE_GT_TOKEN:
            case ELLIPSIS_TOKEN:
            case DOUBLE_DOT_LT_TOKEN:
            case ELVIS_TOKEN:
                return true;
            case RIGHT_ARROW_TOKEN:
            case RIGHT_DOUBLE_ARROW:
                // Arrow tokens are also accepted here (kept as a separate group
                // in the original; intent presumably error-recovery related).
                return true;
            default:
                return false;
        }
    }

    private boolean isParameter(ParserRuleContext ctx) {
        switch (ctx) {
            case REQUIRED_PARAM:
            case DEFAULTABLE_PARAM:
            case REST_PARAM:
            case PARAM_LIST:
                return true;
            default:
                return false;
        }
    }

    /**
     * Get the expected token kind at the given parser rule context. If the parser rule is a terminal,
     * then the corresponding terminal token kind is returned.
If the parser rule is a production, * then {@link SyntaxKind * * @param ctx Parser rule context * @return Token kind expected at the given parser rule */ @Override protected SyntaxKind getExpectedTokenKind(ParserRuleContext ctx) { switch (ctx) { case ASSIGN_OP: return SyntaxKind.EQUAL_TOKEN; case BINARY_OPERATOR: return SyntaxKind.PLUS_TOKEN; case CLOSE_BRACE: return SyntaxKind.CLOSE_BRACE_TOKEN; case CLOSE_PARENTHESIS: case ARG_LIST_END: return SyntaxKind.CLOSE_PAREN_TOKEN; case COMMA: return SyntaxKind.COMMA_TOKEN; case EXTERNAL_KEYWORD: return SyntaxKind.EXTERNAL_KEYWORD; case FUNCTION_KEYWORD: return SyntaxKind.FUNCTION_KEYWORD; case FUNC_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case OPEN_BRACE: return SyntaxKind.OPEN_BRACE_TOKEN; case OPEN_PARENTHESIS: case ARG_LIST_START: return SyntaxKind.OPEN_PAREN_TOKEN; case RETURNS_KEYWORD: return SyntaxKind.RETURNS_KEYWORD; case SEMICOLON: return SyntaxKind.SEMICOLON_TOKEN; case VARIABLE_NAME: case STATEMENT_START_IDENTIFIER: return SyntaxKind.IDENTIFIER_TOKEN; case PUBLIC_KEYWORD: return SyntaxKind.PUBLIC_KEYWORD; case ASSIGNMENT_STMT: return SyntaxKind.IDENTIFIER_TOKEN; case EXPRESSION_RHS: return SyntaxKind.PLUS_TOKEN; case EXPRESSION: case TERMINAL_EXPRESSION: return SyntaxKind.IDENTIFIER_TOKEN; case EXTERNAL_FUNC_BODY: return SyntaxKind.EQUAL_TOKEN; case FUNC_BODY_OR_TYPE_DESC_RHS: case FUNC_BODY_BLOCK: return SyntaxKind.OPEN_BRACE_TOKEN; case FUNC_DEF: case FUNC_DEF_OR_FUNC_TYPE: case FUNC_TYPE_DESC: return SyntaxKind.FUNCTION_KEYWORD; case VAR_DECL_STMT_RHS: return SyntaxKind.SEMICOLON_TOKEN; case SIMPLE_TYPE_DESCRIPTOR: case REQUIRED_PARAM: case VAR_DECL_STMT: case ASSIGNMENT_OR_VAR_DECL_STMT: case DEFAULTABLE_PARAM: case REST_PARAM: return SyntaxKind.TYPE_DESC; case ASTERISK: case INFERRED_TYPE_DESC: return SyntaxKind.ASTERISK_TOKEN; case CLOSED_RECORD_BODY_END: return SyntaxKind.CLOSE_BRACE_PIPE_TOKEN; case CLOSED_RECORD_BODY_START: return SyntaxKind.OPEN_BRACE_PIPE_TOKEN; case ELLIPSIS: return 
SyntaxKind.ELLIPSIS_TOKEN; case QUESTION_MARK: return SyntaxKind.QUESTION_MARK_TOKEN; case RECORD_BODY_START: return SyntaxKind.OPEN_BRACE_PIPE_TOKEN; case RECORD_FIELD: case RECORD_KEYWORD: return SyntaxKind.RECORD_KEYWORD; case TYPE_KEYWORD: return SyntaxKind.TYPE_KEYWORD; case TYPE_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case TYPE_REFERENCE: case ANNOT_TAG_REFERENCE: return SyntaxKind.IDENTIFIER_TOKEN; case RECORD_BODY_END: return SyntaxKind.CLOSE_BRACE_TOKEN; case OBJECT_KEYWORD: return SyntaxKind.OBJECT_KEYWORD; case PRIVATE_KEYWORD: return SyntaxKind.PRIVATE_KEYWORD; case REMOTE_KEYWORD: return SyntaxKind.REMOTE_KEYWORD; case OBJECT_FIELD_RHS: return SyntaxKind.SEMICOLON_TOKEN; case ABSTRACT_KEYWORD: return SyntaxKind.ABSTRACT_KEYWORD; case CLIENT_KEYWORD: return SyntaxKind.CLIENT_KEYWORD; case OBJECT_TYPE_FIRST_QUALIFIER: case OBJECT_TYPE_SECOND_QUALIFIER: return SyntaxKind.OBJECT_KEYWORD; case CLOSE_BRACKET: case MEMBER_ACCESS_KEY_EXPR_END: return SyntaxKind.CLOSE_BRACKET_TOKEN; case DOT: return SyntaxKind.DOT_TOKEN; case FIELD_OR_FUNC_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case OPEN_BRACKET: case TUPLE_TYPE_DESC_START: return SyntaxKind.OPEN_BRACKET_TOKEN; case IF_KEYWORD: return SyntaxKind.IF_KEYWORD; case ELSE_KEYWORD: return SyntaxKind.ELSE_KEYWORD; case WHILE_KEYWORD: return SyntaxKind.WHILE_KEYWORD; case CHECKING_KEYWORD: return SyntaxKind.CHECK_KEYWORD; case AS_KEYWORD: return SyntaxKind.AS_KEYWORD; case BOOLEAN_LITERAL: return SyntaxKind.TRUE_KEYWORD; case IMPORT_KEYWORD: return SyntaxKind.IMPORT_KEYWORD; case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case IMPORT_PREFIX: case VARIABLE_REF: case BASIC_LITERAL: case SERVICE_NAME: case IDENTIFIER: case QUALIFIED_IDENTIFIER: case NAMESPACE_PREFIX: case IMPLICIT_ANON_FUNC_PARAM: case WORKER_NAME_OR_METHOD_NAME: case PEER_WORKER_NAME: case RECEIVE_FIELD_NAME: case WAIT_FIELD_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case VERSION_NUMBER: case MAJOR_VERSION: case MINOR_VERSION: case 
PATCH_VERSION: return SyntaxKind.DECIMAL_INTEGER_LITERAL; case SLASH: return SyntaxKind.SLASH_TOKEN; case VERSION_KEYWORD: return SyntaxKind.VERSION_KEYWORD; case IMPORT_DECL_RHS: return SyntaxKind.SEMICOLON_TOKEN; case IMPORT_SUB_VERSION: return SyntaxKind.SEMICOLON_TOKEN; case COLON: return SyntaxKind.COLON_TOKEN; case MAPPING_FIELD_NAME: case MAPPING_FIELD: return SyntaxKind.IDENTIFIER_TOKEN; case PANIC_KEYWORD: return SyntaxKind.PANIC_KEYWORD; case STRING_LITERAL: return SyntaxKind.STRING_LITERAL; case ON_KEYWORD: return SyntaxKind.ON_KEYWORD; case RESOURCE_KEYWORD: return SyntaxKind.RESOURCE_KEYWORD; case RETURN_KEYWORD: return SyntaxKind.RETURN_KEYWORD; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_KEYWORD; case BREAK_KEYWORD: return SyntaxKind.BREAK_KEYWORD; case LISTENER_KEYWORD: return SyntaxKind.CONST_KEYWORD; case CONTINUE_KEYWORD: return SyntaxKind.CONTINUE_KEYWORD; case CONST_KEYWORD: return SyntaxKind.CONST_KEYWORD; case FINAL_KEYWORD: return SyntaxKind.FINAL_KEYWORD; case CONST_DECL_TYPE: return SyntaxKind.IDENTIFIER_TOKEN; case NIL_TYPE_DESCRIPTOR: return SyntaxKind.NIL_TYPE_DESC; case TYPEOF_KEYWORD: return SyntaxKind.TYPEOF_KEYWORD; case OPTIONAL_TYPE_DESCRIPTOR: return SyntaxKind.OPTIONAL_TYPE_DESC; case UNARY_OPERATOR: return SyntaxKind.PLUS_TOKEN; case ARRAY_TYPE_DESCRIPTOR: return SyntaxKind.ARRAY_TYPE_DESC; case AT: return SyntaxKind.AT_TOKEN; case FIELD_DESCRIPTOR_RHS: return SyntaxKind.SEMICOLON_TOKEN; case AFTER_PARAMETER_TYPE: return SyntaxKind.IDENTIFIER_TOKEN; case CONST_DECL_RHS: return SyntaxKind.EQUAL_TOKEN; case IS_KEYWORD: return SyntaxKind.IS_KEYWORD; case OBJECT_MEMBER_WITHOUT_METADATA: case RECORD_FIELD_WITHOUT_METADATA: case PARAMETER_WITHOUT_ANNOTS: case TYPE_DESCRIPTOR: return SyntaxKind.TYPE_DESC; case TYPEOF_EXPRESSION: return SyntaxKind.TYPEOF_KEYWORD; case RIGHT_ARROW: return SyntaxKind.RIGHT_ARROW_TOKEN; case STMT_START_WITH_EXPR_RHS: return SyntaxKind.EQUAL_TOKEN; case COMPOUND_BINARY_OPERATOR: return 
SyntaxKind.PLUS_TOKEN; case UNARY_EXPRESSION: return SyntaxKind.PLUS_TOKEN; case MAP_KEYWORD: return SyntaxKind.MAP_KEYWORD; case FUTURE_KEYWORD: return SyntaxKind.FUTURE_KEYWORD; case TYPEDESC_KEYWORD: return SyntaxKind.TYPEDESC_KEYWORD; case GT: return SyntaxKind.GT_TOKEN; case LT: return SyntaxKind.LT_TOKEN; case NULL_KEYWORD: return SyntaxKind.NULL_KEYWORD; case LOCK_KEYWORD: return SyntaxKind.LOCK_KEYWORD; case ANNOTATION_KEYWORD: return SyntaxKind.ANNOTATION_KEYWORD; case ANNOT_DECL_OPTIONAL_TYPE: return SyntaxKind.IDENTIFIER_TOKEN; case ANNOT_DECL_RHS: return SyntaxKind.ON_KEYWORD; case ARRAY_LENGTH: return SyntaxKind.DECIMAL_INTEGER_LITERAL; case ATTACH_POINT_IDENT: case IDENT_AFTER_OBJECT_IDENT: case SINGLE_KEYWORD_ATTACH_POINT_IDENT: return SyntaxKind.TYPE_KEYWORD; case FIELD_IDENT: return SyntaxKind.FIELD_KEYWORD; case FUNCTION_IDENT: return SyntaxKind.FUNCTION_KEYWORD; case HEX_INTEGER_LITERAL: return SyntaxKind.HEX_INTEGER_LITERAL; case RECORD_FIELD_OR_RECORD_END: return SyntaxKind.CLOSE_BRACE_TOKEN; case SOURCE_KEYWORD: return SyntaxKind.SOURCE_KEYWORD; case ATTACH_POINT_END: return SyntaxKind.SEMICOLON_TOKEN; case CONSTANT_EXPRESSION: return SyntaxKind.STRING_LITERAL; case CONSTANT_EXPRESSION_START: case OBJECT_IDENT: return SyntaxKind.OBJECT_KEYWORD; case RECORD_IDENT: return SyntaxKind.RECORD_KEYWORD; case RESOURCE_IDENT: return SyntaxKind.RESOURCE_KEYWORD; case XMLNS_KEYWORD: case XML_NAMESPACE_DECLARATION: return SyntaxKind.XMLNS_KEYWORD; case XML_NAMESPACE_PREFIX_DECL: return SyntaxKind.SEMICOLON_TOKEN; case NAMED_WORKER_DECL: case WORKER_KEYWORD: return SyntaxKind.WORKER_KEYWORD; case WORKER_NAME: case NAMED_WORKERS: case ANNOTATION_TAG: return SyntaxKind.IDENTIFIER_TOKEN; case NIL_LITERAL: return SyntaxKind.OPEN_PAREN_TOKEN; case FORK_KEYWORD: return SyntaxKind.FORK_KEYWORD; case DECIMAL_FLOATING_POINT_LITERAL: return SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL; case HEX_FLOATING_POINT_LITERAL: return SyntaxKind.HEX_FLOATING_POINT_LITERAL; case 
PARAMETERIZED_TYPE: return SyntaxKind.MAP_KEYWORD; case TRAP_KEYWORD: return SyntaxKind.TRAP_KEYWORD; case FOREACH_KEYWORD: return SyntaxKind.FOREACH_KEYWORD; case IN_KEYWORD: return SyntaxKind.IN_KEYWORD; case PIPE: return SyntaxKind.PIPE_TOKEN; case TABLE_KEYWORD: return SyntaxKind.TABLE_KEYWORD; case KEY_KEYWORD: return SyntaxKind.KEY_KEYWORD; case ERROR_KEYWORD: return SyntaxKind.ERROR_KEYWORD; case STREAM_KEYWORD: return SyntaxKind.STREAM_KEYWORD; case LET_KEYWORD: return SyntaxKind.LET_KEYWORD; case TEMPLATE_END: case TEMPLATE_START: return SyntaxKind.BACKTICK_TOKEN; case LT_TOKEN: return SyntaxKind.LT_TOKEN; case GT_TOKEN: return SyntaxKind.GT_TOKEN; case INTERPOLATION_START_TOKEN: return SyntaxKind.INTERPOLATION_START_TOKEN; case XML_KEYWORD: return SyntaxKind.XML_KEYWORD; case XML_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case STRING_KEYWORD: return SyntaxKind.STRING_KEYWORD; case SELECT_KEYWORD: return SyntaxKind.SELECT_KEYWORD; case WHERE_KEYWORD: return SyntaxKind.WHERE_KEYWORD; case FROM_KEYWORD: return SyntaxKind.FROM_KEYWORD; case EXPR_FUNC_BODY_START: return SyntaxKind.RIGHT_DOUBLE_ARROW; case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: return SyntaxKind.CLOSE_BRACE_TOKEN; case START_KEYWORD: return SyntaxKind.START_KEYWORD; case FLUSH_KEYWORD: return SyntaxKind.FLUSH_KEYWORD; case DEFAULT_KEYWORD: case OPTIONAL_PEER_WORKER: case DEFAULT_WORKER_NAME_IN_ASYNC_SEND: return SyntaxKind.DEFAULT_KEYWORD; case DECIMAL_INTEGER_LITERAL: case SIGNED_INT_OR_FLOAT_RHS: return SyntaxKind.DECIMAL_INTEGER_LITERAL; case SYNC_SEND_TOKEN: return SyntaxKind.SYNC_SEND_TOKEN; case WAIT_KEYWORD: return SyntaxKind.WAIT_KEYWORD; case ANNOT_CHAINING_TOKEN: return SyntaxKind.ANNOT_CHAINING_TOKEN; case OPTIONAL_CHAINING_TOKEN: return SyntaxKind.OPTIONAL_CHAINING_TOKEN; case TRANSACTION_KEYWORD: return SyntaxKind.TRANSACTION_KEYWORD; case COMMIT_KEYWORD: return SyntaxKind.COMMIT_KEYWORD; case RETRY_KEYWORD: return SyntaxKind.RETRY_KEYWORD; case ROLLBACK_KEYWORD: return 
SyntaxKind.ROLLBACK_KEYWORD; case ENUM_KEYWORD: return SyntaxKind.ENUM_KEYWORD; case MODULE_ENUM_NAME: case ENUM_MEMBER_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case ENUM_MEMBER_INTERNAL_RHS: case ENUM_MEMBER_RHS: return SyntaxKind.CLOSE_BRACE_TOKEN; default: break; } return SyntaxKind.NONE; } /** * Check whether a token kind is a basic literal. * * @param kind Token kind to check * @return <code>true</code> if the given token kind belongs to a basic literal.<code>false</code> otherwise */ private boolean isBasicLiteral(SyntaxKind kind) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return true; default: return false; } } /** * Check whether the given token refers to a unary operator. * * @param token Token to check * @return <code>true</code> if the given token refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(STToken token) { switch (token.kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } private boolean isSingleKeywordAttachPointIdent(SyntaxKind tokenKind) { switch (tokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: return true; default: return false; } } /** * Check whether the given token is a parameterized type keyword. * * @param tokenKind Token to check * @return <code>true</code> if the given token is a parameterized type keyword. 
<code>false</code> otherwise */ public boolean isParameterizedTypeToken(SyntaxKind tokenKind) { switch (tokenKind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return true; default: return false; } } }
/**
 * Error handler for the Ballerina parser. Holds the alternative parser-rule-context
 * paths that error recovery explores when the next token does not match the grammar.
 */
class BallerinaParserErrorHandler extends AbstractParserErrorHandler {

    /**
     * FUNC_DEF_OR_FUNC_TYPE --> When a func-def and func-type-desc are possible.
     * e.g: start of a module level construct that starts with 'function' keyword.
     */
    private static final ParserRuleContext[] FUNC_TYPE_OR_DEF_OPTIONAL_RETURNS =
            { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.FUNC_BODY_OR_TYPE_DESC_RHS };

    private static final ParserRuleContext[] FUNC_BODY_OR_TYPE_DESC_RHS =
            { ParserRuleContext.FUNC_BODY, ParserRuleContext.AMBIGUOUS_FUNC_TYPE_DESC_RHS };

    /**
     * FUNC_DEF --> When only function definitions are possible. eg: resource function.
     */
    private static final ParserRuleContext[] FUNC_DEF_OPTIONAL_RETURNS =
            { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.FUNC_BODY };

    private static final ParserRuleContext[] FUNC_BODY =
            { ParserRuleContext.FUNC_BODY_BLOCK, ParserRuleContext.EXTERNAL_FUNC_BODY };

    private static final ParserRuleContext[] OBJECT_FUNC_BODY =
            { ParserRuleContext.SEMICOLON, ParserRuleContext.EXTERNAL_FUNC_BODY };

    /**
     * ANNON_FUNC --> When an anonymous function is possible.
     */
    private static final ParserRuleContext[] ANNON_FUNC_OPTIONAL_RETURNS =
            { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.ANON_FUNC_BODY };

    private static final ParserRuleContext[] ANON_FUNC_BODY =
            { ParserRuleContext.FUNC_BODY_BLOCK, ParserRuleContext.EXPLICIT_ANON_FUNC_EXPR_BODY_START };

    /**
     * FUNC_TYPE --> When only a function type is possible.
     */
    private static final ParserRuleContext[] FUNC_TYPE_OPTIONAL_RETURNS =
            { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.FUNC_TYPE_DESC_END };

    private static final ParserRuleContext[] WORKER_NAME_RHS =
            { ParserRuleContext.RETURNS_KEYWORD, ParserRuleContext.BLOCK_STMT };

    // Statement kinds that may occur at a statement position; CLOSE_BRACE terminates
    // the enclosing block.
    private static final ParserRuleContext[] STATEMENTS = { ParserRuleContext.CLOSE_BRACE,
            ParserRuleContext.ASSIGNMENT_STMT, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.IF_BLOCK,
            ParserRuleContext.WHILE_BLOCK, ParserRuleContext.CALL_STMT, ParserRuleContext.PANIC_STMT,
            ParserRuleContext.CONTINUE_STATEMENT, ParserRuleContext.BREAK_STATEMENT, ParserRuleContext.RETURN_STMT,
            ParserRuleContext.COMPOUND_ASSIGNMENT_STMT, ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT,
            ParserRuleContext.EXPRESSION_STATEMENT, ParserRuleContext.LOCK_STMT, ParserRuleContext.BLOCK_STMT,
            ParserRuleContext.NAMED_WORKER_DECL, ParserRuleContext.FORK_STMT, ParserRuleContext.FOREACH_STMT,
            ParserRuleContext.XML_NAMESPACE_DECLARATION, ParserRuleContext.TRANSACTION_STMT,
            ParserRuleContext.RETRY_STMT, ParserRuleContext.ROLLBACK_STMT };

    private static final ParserRuleContext[] VAR_DECL_RHS =
            { ParserRuleContext.ASSIGN_OP, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] TOP_LEVEL_NODE = { ParserRuleContext.DOC_STRING,
            ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE,
            ParserRuleContext.MODULE_TYPE_DEFINITION, ParserRuleContext.IMPORT_DECL, ParserRuleContext.LISTENER_DECL,
            ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.SERVICE_DECL,
            ParserRuleContext.ANNOTATION_DECL, ParserRuleContext.XML_NAMESPACE_DECLARATION,
            ParserRuleContext.MODULE_ENUM_DECLARATION, ParserRuleContext.EOF };

    private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_METADATA = new ParserRuleContext[] {
            ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE,
            ParserRuleContext.MODULE_TYPE_DEFINITION, ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL,
            ParserRuleContext.LISTENER_DECL, ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT,
            ParserRuleContext.ANNOTATION_DECL, ParserRuleContext.XML_NAMESPACE_DECLARATION,
            ParserRuleContext.MODULE_ENUM_DECLARATION, ParserRuleContext.EOF };

    private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_MODIFIER =
            { ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE, ParserRuleContext.MODULE_TYPE_DEFINITION,
            ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL,
            ParserRuleContext.CONSTANT_DECL, ParserRuleContext.ANNOTATION_DECL, ParserRuleContext.VAR_DECL_STMT,
            ParserRuleContext.XML_NAMESPACE_DECLARATION, ParserRuleContext.MODULE_ENUM_DECLARATION,
            ParserRuleContext.EOF };

    private static final ParserRuleContext[] TYPE_OR_VAR_NAME =
            { ParserRuleContext.VARIABLE_NAME, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN };

    private static final ParserRuleContext[] ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN =
            { ParserRuleContext.ASSIGN_OP, ParserRuleContext.VARIABLE_NAME };

    private static final ParserRuleContext[] FIELD_DESCRIPTOR_RHS =
            { ParserRuleContext.SEMICOLON, ParserRuleContext.QUESTION_MARK, ParserRuleContext.ASSIGN_OP };

    private static final ParserRuleContext[] FIELD_OR_REST_DESCIPTOR_RHS =
            { ParserRuleContext.ELLIPSIS, ParserRuleContext.VARIABLE_NAME };

    private static final ParserRuleContext[] RECORD_BODY_START =
            { ParserRuleContext.CLOSED_RECORD_BODY_START, ParserRuleContext.OPEN_BRACE };

    private static final ParserRuleContext[] RECORD_BODY_END =
            { ParserRuleContext.CLOSED_RECORD_BODY_END, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] TYPE_DESCRIPTORS = { ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR,
            ParserRuleContext.OBJECT_TYPE_DESCRIPTOR, ParserRuleContext.RECORD_TYPE_DESCRIPTOR,
            ParserRuleContext.NIL_TYPE_DESCRIPTOR, ParserRuleContext.PARAMETERIZED_TYPE,
            ParserRuleContext.ERROR_KEYWORD, ParserRuleContext.STREAM_KEYWORD, ParserRuleContext.TABLE_KEYWORD,
            ParserRuleContext.FUNC_TYPE_DESC, ParserRuleContext.PARENTHESISED_TYPE_DESC_START,
            ParserRuleContext.CONSTANT_EXPRESSION, ParserRuleContext.TUPLE_TYPE_DESC_START };

    private static final ParserRuleContext[] RECORD_FIELD_OR_RECORD_END =
            { ParserRuleContext.RECORD_BODY_END, ParserRuleContext.RECORD_FIELD };

    private static final ParserRuleContext[] RECORD_FIELD_START = { ParserRuleContext.ANNOTATIONS,
            ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD };

    private static final ParserRuleContext[] RECORD_FIELD_WITHOUT_METADATA =
            { ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD };

    private static final ParserRuleContext[] ARG_START_OR_ARG_LIST_END =
            { ParserRuleContext.ARG_LIST_END, ParserRuleContext.ARG_START };

    private static final ParserRuleContext[] ARG_START =
            { ParserRuleContext.VARIABLE_NAME, ParserRuleContext.ELLIPSIS, ParserRuleContext.EXPRESSION };

    private static final ParserRuleContext[] ARG_END =
            { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.COMMA };

    private static final ParserRuleContext[] NAMED_OR_POSITIONAL_ARG_RHS =
            { ParserRuleContext.ARG_END, ParserRuleContext.ASSIGN_OP };

    private static final ParserRuleContext[] OBJECT_FIELD_RHS =
            { ParserRuleContext.SEMICOLON, ParserRuleContext.ASSIGN_OP };

    private static final ParserRuleContext[] OBJECT_MEMBER_START =
            { ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK,
            ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] OBJECT_MEMBER_WITHOUT_METADATA =
            { ParserRuleContext.ASTERISK, ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD = { ParserRuleContext.PUBLIC_KEYWORD,
            ParserRuleContext.PRIVATE_KEYWORD, ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY };

    private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY =
            { ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER, ParserRuleContext.OBJECT_METHOD_START };

    private static final ParserRuleContext[] OBJECT_METHOD_START =
            { ParserRuleContext.REMOTE_KEYWORD, ParserRuleContext.FUNCTION_KEYWORD };

    private static final ParserRuleContext[] OBJECT_TYPE_DESCRIPTOR_START =
            { ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER, ParserRuleContext.OBJECT_KEYWORD };

    private static final ParserRuleContext[] ELSE_BODY = { ParserRuleContext.IF_BLOCK, ParserRuleContext.OPEN_BRACE };

    private static final ParserRuleContext[] ELSE_BLOCK =
            { ParserRuleContext.ELSE_KEYWORD, ParserRuleContext.STATEMENT };

    private static final ParserRuleContext[] CALL_STATEMENT =
            { ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.VARIABLE_NAME };

    private static final ParserRuleContext[] IMPORT_PREFIX_DECL =
            { ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] IMPORT_VERSION =
            { ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] IMPORT_DECL_RHS = { ParserRuleContext.SLASH, ParserRuleContext.DOT,
            ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] AFTER_IMPORT_MODULE_NAME = { ParserRuleContext.DOT,
            ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] MAJOR_MINOR_VERSION_END =
            { ParserRuleContext.DOT, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] RETURN_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.EXPRESSION };

    // NOTE(review): BASE64_KEYWORD appears twice below; the first occurrence was likely
    // meant to be BASE16_KEYWORD (a BASE16_KEYWORD context exists elsewhere) — confirm.
    private static final ParserRuleContext[] EXPRESSION_START = { ParserRuleContext.BASIC_LITERAL,
            ParserRuleContext.NIL_LITERAL, ParserRuleContext.VARIABLE_REF, ParserRuleContext.ACCESS_EXPRESSION,
            ParserRuleContext.TYPEOF_EXPRESSION, ParserRuleContext.TRAP_KEYWORD, ParserRuleContext.UNARY_EXPRESSION,
            ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.LIST_CONSTRUCTOR, ParserRuleContext.TYPE_CAST,
            ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION,
            ParserRuleContext.LET_EXPRESSION, ParserRuleContext.TEMPLATE_START, ParserRuleContext.XML_KEYWORD,
            ParserRuleContext.STRING_KEYWORD, ParserRuleContext.BASE64_KEYWORD, ParserRuleContext.BASE64_KEYWORD,
            ParserRuleContext.ANON_FUNC_EXPRESSION, ParserRuleContext.ERROR_KEYWORD, ParserRuleContext.NEW_KEYWORD,
            ParserRuleContext.START_KEYWORD, ParserRuleContext.FLUSH_KEYWORD, ParserRuleContext.LEFT_ARROW_TOKEN,
            ParserRuleContext.WAIT_KEYWORD, ParserRuleContext.COMMIT_KEYWORD, ParserRuleContext.TRANSACTIONAL_KEYWORD,
            ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION };

    private static final ParserRuleContext[] FIRST_MAPPING_FIELD_START =
            { ParserRuleContext.MAPPING_FIELD, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] MAPPING_FIELD_START = { ParserRuleContext.MAPPING_FIELD_NAME,
            ParserRuleContext.STRING_LITERAL, ParserRuleContext.COMPUTED_FIELD_NAME, ParserRuleContext.ELLIPSIS };

    private static final ParserRuleContext[] SPECIFIC_FIELD_RHS =
            { ParserRuleContext.COLON, ParserRuleContext.MAPPING_FIELD_END };

    private static final ParserRuleContext[] MAPPING_FIELD_END =
            { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.COMMA };

    private static final ParserRuleContext[] OPTIONAL_SERVICE_NAME =
            { ParserRuleContext.SERVICE_NAME, ParserRuleContext.ON_KEYWORD };

    private static final ParserRuleContext[] RESOURCE_DEF_START =
            { ParserRuleContext.RESOURCE_KEYWORD, ParserRuleContext.FUNC_DEF, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] CONST_DECL_RHS =
            { ParserRuleContext.STATEMENT_START_IDENTIFIER, ParserRuleContext.ASSIGN_OP };

    private static final ParserRuleContext[] ARRAY_LENGTH = { ParserRuleContext.CLOSE_BRACKET,
            ParserRuleContext.DECIMAL_INTEGER_LITERAL, ParserRuleContext.HEX_INTEGER_LITERAL,
            ParserRuleContext.ASTERISK,
ParserRuleContext.VARIABLE_REF };

    private static final ParserRuleContext[] PARAM_LIST =
            { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.REQUIRED_PARAM };

    private static final ParserRuleContext[] PARAMETER_START =
            { ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESC_IN_PARAM };

    private static final ParserRuleContext[] PARAMETER_WITHOUT_ANNOTS =
            { ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESC_IN_PARAM };

    private static final ParserRuleContext[] REQUIRED_PARAM_NAME_RHS =
            { ParserRuleContext.PARAM_END, ParserRuleContext.ASSIGN_OP };

    private static final ParserRuleContext[] PARAM_END =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS };

    private static final ParserRuleContext[] STMT_START_WITH_EXPR_RHS = { ParserRuleContext.ASSIGN_OP,
            ParserRuleContext.RIGHT_ARROW, ParserRuleContext.COMPOUND_BINARY_OPERATOR, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] EXPRESSION_STATEMENT_START = { ParserRuleContext.VARIABLE_REF,
            ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.START_KEYWORD,
            ParserRuleContext.FLUSH_KEYWORD };

    private static final ParserRuleContext[] ANNOT_DECL_OPTIONAL_TYPE =
            { ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER, ParserRuleContext.ANNOTATION_TAG };

    private static final ParserRuleContext[] CONST_DECL_TYPE =
            { ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER, ParserRuleContext.VARIABLE_NAME };

    private static final ParserRuleContext[] ANNOT_DECL_RHS =
            { ParserRuleContext.ANNOTATION_TAG, ParserRuleContext.ON_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] ANNOT_OPTIONAL_ATTACH_POINTS =
            { ParserRuleContext.ON_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] ATTACH_POINT =
            { ParserRuleContext.SOURCE_KEYWORD, ParserRuleContext.ATTACH_POINT_IDENT };

    private static final ParserRuleContext[] ATTACH_POINT_IDENT =
            { ParserRuleContext.SINGLE_KEYWORD_ATTACH_POINT_IDENT, ParserRuleContext.OBJECT_IDENT,
            ParserRuleContext.RESOURCE_IDENT, ParserRuleContext.RECORD_IDENT };

    private static final ParserRuleContext[] ATTACH_POINT_END =
            { ParserRuleContext.COMMA, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] XML_NAMESPACE_PREFIX_DECL =
            { ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] CONSTANT_EXPRESSION = { ParserRuleContext.BASIC_LITERAL,
            ParserRuleContext.VARIABLE_REF, ParserRuleContext.PLUS_TOKEN, ParserRuleContext.MINUS_TOKEN,
            ParserRuleContext.NIL_LITERAL };

    private static final ParserRuleContext[] LIST_CONSTRUCTOR_RHS =
            { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.EXPRESSION };

    private static final ParserRuleContext[] TYPE_CAST_PARAM =
            { ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS, ParserRuleContext.ANNOTATIONS };

    private static final ParserRuleContext[] TYPE_CAST_PARAM_RHS =
            { ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS, ParserRuleContext.GT };

    private static final ParserRuleContext[] TABLE_KEYWORD_RHS =
            { ParserRuleContext.KEY_SPECIFIER, ParserRuleContext.TABLE_CONSTRUCTOR };

    private static final ParserRuleContext[] ROW_LIST_RHS =
            { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.MAPPING_CONSTRUCTOR };

    private static final ParserRuleContext[] TABLE_ROW_END =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET };

    private static final ParserRuleContext[] KEY_SPECIFIER_RHS =
            { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.VARIABLE_NAME };

    private static final ParserRuleContext[] TABLE_KEY_RHS =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS };

    private static final ParserRuleContext[] ERROR_TYPE_PARAMS =
            { ParserRuleContext.INFERRED_TYPE_DESC, ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS };

    private static final ParserRuleContext[] LET_VAR_DECL_START =
            { ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, ParserRuleContext.ANNOTATIONS };

    private static final ParserRuleContext[] STREAM_TYPE_FIRST_PARAM_RHS =
            { ParserRuleContext.COMMA, ParserRuleContext.GT };

    private static final ParserRuleContext[] TEMPLATE_MEMBER = { ParserRuleContext.TEMPLATE_STRING,
            ParserRuleContext.INTERPOLATION_START_TOKEN, ParserRuleContext.TEMPLATE_END };

    private static final ParserRuleContext[] TEMPLATE_STRING_RHS =
            { ParserRuleContext.INTERPOLATION_START_TOKEN, ParserRuleContext.TEMPLATE_END };

    private static final ParserRuleContext[] KEY_CONSTRAINTS_RHS =
            { ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.LT };

    private static final ParserRuleContext[] FUNCTION_KEYWORD_RHS =
            { ParserRuleContext.FUNC_NAME, ParserRuleContext.OPEN_PARENTHESIS };

    private static final ParserRuleContext[] TYPEDESC_RHS = { ParserRuleContext.END_OF_TYPE_DESC,
            ParserRuleContext.ARRAY_TYPE_DESCRIPTOR, ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR,
            ParserRuleContext.PIPE, ParserRuleContext.BITWISE_AND_OPERATOR };

    private static final ParserRuleContext[] TABLE_TYPE_DESC_RHS =
            { ParserRuleContext.KEY_KEYWORD, ParserRuleContext.TYPEDESC_RHS };

    private static final ParserRuleContext[] NEW_KEYWORD_RHS =
            { ParserRuleContext.TYPE_DESC_IN_NEW_EXPR, ParserRuleContext.EXPRESSION_RHS };

    private static final ParserRuleContext[] TABLE_CONSTRUCTOR_OR_QUERY_START = { ParserRuleContext.TABLE_KEYWORD,
            ParserRuleContext.STREAM_KEYWORD, ParserRuleContext.QUERY_EXPRESSION };

    private static final ParserRuleContext[] TABLE_CONSTRUCTOR_OR_QUERY_RHS =
            { ParserRuleContext.TABLE_CONSTRUCTOR, ParserRuleContext.QUERY_EXPRESSION };

    private static final ParserRuleContext[] QUERY_EXPRESSION_RHS = { ParserRuleContext.SELECT_CLAUSE,
            ParserRuleContext.WHERE_CLAUSE, ParserRuleContext.FROM_CLAUSE, ParserRuleContext.LET_CLAUSE,
            ParserRuleContext.DO_CLAUSE, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS =
            { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.COMMA };

    private static final ParserRuleContext[] ANNOTATION_REF_RHS =
            { ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.ANNOTATION_END };

    private static final ParserRuleContext[] INFER_PARAM_END_OR_PARENTHESIS_END =
            { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.EXPR_FUNC_BODY_START };

    private static final ParserRuleContext[] OPTIONAL_PEER_WORKER =
            { ParserRuleContext.PEER_WORKER_NAME, ParserRuleContext.EXPRESSION_RHS };

    private static final ParserRuleContext[] TYPE_DESC_IN_TUPLE_RHS =
            { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.COMMA, ParserRuleContext.ELLIPSIS };

    private static final ParserRuleContext[] LIST_CONSTRUCTOR_MEMBER_END =
            { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.COMMA };

    private static final ParserRuleContext[] NIL_OR_PARENTHESISED_TYPE_DESC_RHS =
            { ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.TYPE_DESCRIPTOR };

    private static final ParserRuleContext[] BINDING_PATTERN = { ParserRuleContext.CAPTURE_BINDING_PATTERN,
            ParserRuleContext.LIST_BINDING_PATTERN, ParserRuleContext.MAPPING_BINDING_PATTERN };

    private static final ParserRuleContext[] LIST_BINDING_PATTERN_CONTENTS =
            { ParserRuleContext.REST_BINDING_PATTERN, ParserRuleContext.BINDING_PATTERN };

    private static final ParserRuleContext[] MAPPING_BINDING_PATTERN_MEMBER =
            { ParserRuleContext.REST_BINDING_PATTERN, ParserRuleContext.FIELD_BINDING_PATTERN };

    private static final ParserRuleContext[] LIST_BINDING_PATTERN_END_OR_CONTINUE =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET };

    private static final ParserRuleContext[] MAPPING_BINDING_PATTERN_END =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] FIELD_BINDING_PATTERN_END =
            { ParserRuleContext.COMMA, ParserRuleContext.COLON, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] REMOTE_CALL_OR_ASYNC_SEND_RHS =
            { ParserRuleContext.WORKER_NAME_OR_METHOD_NAME, ParserRuleContext.DEFAULT_WORKER_NAME_IN_ASYNC_SEND };

    private static final ParserRuleContext[] REMOTE_CALL_OR_ASYNC_SEND_END =
            { ParserRuleContext.ARG_LIST_START, ParserRuleContext.SEMICOLON };

    private static final ParserRuleContext[] RECEIVE_WORKERS =
            { ParserRuleContext.PEER_WORKER_NAME, ParserRuleContext.MULTI_RECEIVE_WORKERS };

    private static final ParserRuleContext[] RECEIVE_FIELD =
            { ParserRuleContext.PEER_WORKER_NAME, ParserRuleContext.RECEIVE_FIELD_NAME };

    private static final ParserRuleContext[] RECEIVE_FIELD_END =
            { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.COMMA };

    private static final ParserRuleContext[] WAIT_KEYWORD_RHS =
            { ParserRuleContext.MULTI_WAIT_FIELDS, ParserRuleContext.ALTERNATE_WAIT_EXPRS };

    private static final ParserRuleContext[] WAIT_FIELD_NAME_RHS =
            { ParserRuleContext.COLON, ParserRuleContext.WAIT_FIELD_END };

    private static final ParserRuleContext[] WAIT_FIELD_END =
            { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.COMMA };

    private static final ParserRuleContext[] WAIT_FUTURE_EXPR_END =
            { ParserRuleContext.ALTERNATE_WAIT_EXPR_LIST_END, ParserRuleContext.PIPE };

    private static final ParserRuleContext[] ENUM_MEMBER_START =
            { ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.ENUM_MEMBER_NAME };

    private static final ParserRuleContext[] ENUM_MEMBER_INTERNAL_RHS =
            { ParserRuleContext.ASSIGN_OP, ParserRuleContext.ENUM_MEMBER_RHS };

    private static final ParserRuleContext[] ENUM_MEMBER_RHS =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACE };

    private static final ParserRuleContext[] MEMBER_ACCESS_KEY_EXPR_END =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET };

    private static final ParserRuleContext[] ROLLBACK_RHS =
            { ParserRuleContext.SEMICOLON, ParserRuleContext.EXPRESSION };

    private static final ParserRuleContext[] RETRY_KEYWORD_RHS =
            { ParserRuleContext.LT, ParserRuleContext.RETRY_TYPE_PARAM_RHS };

    private static final ParserRuleContext[] RETRY_TYPE_PARAM_RHS =
            { ParserRuleContext.ARG_LIST_START, ParserRuleContext.RETRY_BODY };

    private static final ParserRuleContext[] RETRY_BODY =
            { ParserRuleContext.BLOCK_STMT, ParserRuleContext.TRANSACTION_STMT };

    private static final ParserRuleContext[] LIST_BP_OR_TUPLE_TYPE_MEMBER =
            { ParserRuleContext.TYPE_DESCRIPTOR, ParserRuleContext.LIST_BINDING_PATTERN_CONTENTS };

    private static final ParserRuleContext[] LIST_BP_OR_TUPLE_TYPE_DESC_RHS =
            { ParserRuleContext.ASSIGN_OP, ParserRuleContext.VARIABLE_NAME };

    private static final ParserRuleContext[] BRACKETED_LIST_MEMBER_END =
            { ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACKET };

    private static final ParserRuleContext[] BRACKETED_LIST_MEMBER =
            { ParserRuleContext.EXPRESSION, ParserRuleContext.BINDING_PATTERN };

    private static final ParserRuleContext[] LIST_BINDING_MEMBER_OR_ARRAY_LENGTH =
            { ParserRuleContext.ARRAY_LENGTH, ParserRuleContext.BINDING_PATTERN };

    private static final ParserRuleContext[] BRACKETED_LIST_RHS = { ParserRuleContext.ASSIGN_OP,
            ParserRuleContext.VARIABLE_NAME, ParserRuleContext.BINDING_PATTERN, ParserRuleContext.EXPRESSION_RHS };

    /**
     * Create a new error handler that reads tokens through the given token reader.
     *
     * @param tokenReader Token reader to operate on
     */
    public BallerinaParserErrorHandler(AbstractTokenReader tokenReader) {
        super(tokenReader);
    }

    /**
     * Check whether the given context is a production with more than one possible
     * grammar path, i.e. recovery must branch into alternatives.
     *
     * @param currentCtx Parser rule context to check
     * @return <code>true</code> if the context has alternative productions
     */
    @Override
    protected boolean isProductionWithAlternatives(ParserRuleContext currentCtx) {
        switch (currentCtx) {
            case TOP_LEVEL_NODE:
            case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
            case TOP_LEVEL_NODE_WITHOUT_METADATA:
            case STATEMENT:
            case STATEMENT_WITHOUT_ANNOTS:
            case FUNC_BODY_OR_TYPE_DESC_RHS:
            case VAR_DECL_STMT_RHS:
            case EXPRESSION_RHS:
            case PARAMETER_NAME_RHS:
            case ASSIGNMENT_OR_VAR_DECL_STMT:
            case AFTER_PARAMETER_TYPE:
            case FIELD_DESCRIPTOR_RHS:
            case RECORD_BODY_START:
            case RECORD_BODY_END:
            case TYPE_DESCRIPTOR:
            case NAMED_OR_POSITIONAL_ARG_RHS:
            case OBJECT_FIELD_RHS:
            case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
            case OBJECT_MEMBER:
            case OBJECT_TYPE_FIRST_QUALIFIER:
            case OBJECT_TYPE_SECOND_QUALIFIER:
            case ELSE_BODY:
            case IMPORT_DECL_RHS:
            case IMPORT_SUB_VERSION:
            case VERSION_NUMBER:
            case IMPORT_VERSION_DECL:
            case IMPORT_PREFIX_DECL:
            case MAPPING_FIELD:
            case FIRST_MAPPING_FIELD:
            case SPECIFIC_FIELD_RHS:
            case RESOURCE_DEF:
            case
PARAMETER_WITHOUT_ANNOTS:
            case PARAMETER_START:
            case STMT_START_WITH_EXPR_RHS:
            case RECORD_FIELD_OR_RECORD_END:
            case CONST_DECL_TYPE:
            case CONST_DECL_RHS:
            case ANNOT_OPTIONAL_ATTACH_POINTS:
            case XML_NAMESPACE_PREFIX_DECL:
            case ANNOT_DECL_OPTIONAL_TYPE:
            case ANNOT_DECL_RHS:
            case TABLE_KEYWORD_RHS:
            case ARRAY_LENGTH:
            case TYPEDESC_RHS:
            case ERROR_TYPE_PARAMS:
            case STREAM_TYPE_FIRST_PARAM_RHS:
            case KEY_CONSTRAINTS_RHS:
            case TABLE_TYPE_DESC_RHS:
            case FUNC_BODY:
            case FUNC_OPTIONAL_RETURNS:
            case TERMINAL_EXPRESSION:
            case TABLE_CONSTRUCTOR_OR_QUERY_START:
            case TABLE_CONSTRUCTOR_OR_QUERY_RHS:
            case QUERY_PIPELINE_RHS:
            case ANON_FUNC_BODY:
            case BINDING_PATTERN:
            case LIST_BINDING_PATTERN_CONTENTS:
            case LIST_BINDING_PATTERN_END_OR_CONTINUE:
            case MAPPING_BINDING_PATTERN_MEMBER:
            case MAPPING_BINDING_PATTERN_END:
            case FIELD_BINDING_PATTERN_END:
            case REMOTE_CALL_OR_ASYNC_SEND_RHS:
            case REMOTE_CALL_OR_ASYNC_SEND_END:
            case RECEIVE_FIELD_END:
            case RECEIVE_WORKERS:
            case WAIT_FIELD_NAME:
            case WAIT_FIELD_NAME_RHS:
            case WAIT_FIELD_END:
            case WAIT_FUTURE_EXPR_END:
            case MAPPING_FIELD_END:
            case ENUM_MEMBER_START:
            case ENUM_MEMBER_INTERNAL_RHS:
            case ENUM_MEMBER_RHS:
            case LIST_BP_OR_TUPLE_TYPE_MEMBER:
            case LIST_BP_OR_TUPLE_TYPE_DESC_RHS:
            case STMT_START_IDENTIFIER_RHS:
            case BRACKETED_LIST_RHS:
            case BRACKETED_LIST_MEMBER:
            case BRACKETED_LIST_MEMBER_END:
            case AMBIGUOUS_STMT:
            case TYPED_BINDING_PATTERN_TYPE_RHS:
            case TYPE_DESC_IN_TUPLE_RHS:
            case LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the upcoming tokens mark the end of an object type body.
     * Inspects the token at {@code nextLookahead}, and if that is not a terminating
     * token, also the token immediately after it.
     *
     * @param nextLookahead Position of the next token to peek
     * @return <code>true</code> if the object type body ends here
     */
    private boolean isEndOfObjectTypeNode(int nextLookahead) {
        STToken nextToken = this.tokenReader.peek(nextLookahead);
        switch (nextToken.kind) {
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
            case CLOSE_BRACE_PIPE_TOKEN:
            case TYPE_KEYWORD:
            case SERVICE_KEYWORD:
                return true;
            default:
                // Look one more token ahead before concluding the body continues.
                STToken nextNextToken = this.tokenReader.peek(nextLookahead + 1);
                switch (nextNextToken.kind) {
                    case CLOSE_BRACE_TOKEN:
                    case EOF_TOKEN:
                    case CLOSE_BRACE_PIPE_TOKEN:
                    case TYPE_KEYWORD:
                    case SERVICE_KEYWORD:
                        return true;
                    default:
                        return false;
                }
        }
    }

    /**
     * Search for a solution.
     * Terminals are directly matched, and non-terminals which have alternative
     * productions are handled via seekInAlternativesPaths().
     *
     * @param currentCtx Current context
     * @param lookahead Position of the next token to consider, relative to the position of the original error.
     * @param currentDepth Amount of distance traveled so far.
     * @return Recovery result
     */
    @Override
    protected Result seekMatch(ParserRuleContext currentCtx, int lookahead, int currentDepth, boolean isEntryPoint) {
        boolean hasMatch;
        boolean skipRule;
        int matchingRulesCount = 0;

        while (currentDepth < lookaheadLimit) {
            hasMatch = true;
            skipRule = false;
            STToken nextToken = this.tokenReader.peek(lookahead);

            switch (currentCtx) {
                case EOF:
                    hasMatch = nextToken.kind == SyntaxKind.EOF_TOKEN;
                    break;
                case PUBLIC_KEYWORD:
                    hasMatch = nextToken.kind == SyntaxKind.PUBLIC_KEYWORD;
                    break;
                case PRIVATE_KEYWORD:
                    hasMatch = nextToken.kind == SyntaxKind.PRIVATE_KEYWORD;
                    break;
                case REMOTE_KEYWORD:
                    hasMatch = nextToken.kind == SyntaxKind.REMOTE_KEYWORD;
                    break;
                case FUNCTION_KEYWORD:
                    hasMatch = nextToken.kind == SyntaxKind.FUNCTION_KEYWORD;
                    break;
                // All of the following contexts expect an identifier token.
                case FUNC_NAME:
                case VARIABLE_NAME:
                case TYPE_NAME:
                case IMPORT_ORG_OR_MODULE_NAME:
                case IMPORT_MODULE_NAME:
                case IMPORT_PREFIX:
                case MAPPING_FIELD_NAME:
                case SERVICE_NAME:
                case QUALIFIED_IDENTIFIER:
                case IDENTIFIER:
                case ANNOTATION_TAG:
                case NAMESPACE_PREFIX:
                case WORKER_NAME:
                case IMPLICIT_ANON_FUNC_PARAM:
                case WORKER_NAME_OR_METHOD_NAME:
                case RECEIVE_FIELD_NAME:
                case WAIT_FIELD_NAME:
                case FIELD_BINDING_PATTERN_NAME:
                    hasMatch = nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN;
                    break;
                case OPEN_PARENTHESIS:
                case PARENTHESISED_TYPE_DESC_START:
                    hasMatch = nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN;
                    break;
                case CLOSE_PARENTHESIS:
                    hasMatch = nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN;
                    break;
                case RETURNS_KEYWORD:
                    hasMatch = nextToken.kind == SyntaxKind.RETURNS_KEYWORD;
                    break;
                case SIMPLE_TYPE_DESCRIPTOR:
                    hasMatch =
BallerinaParser.isSimpleType(nextToken.kind) || nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN; break; case OPEN_BRACE: hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN; break; case CLOSE_BRACE: hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN; break; case ASSIGN_OP: hasMatch = nextToken.kind == SyntaxKind.EQUAL_TOKEN; break; case EXTERNAL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.EXTERNAL_KEYWORD; break; case SEMICOLON: hasMatch = nextToken.kind == SyntaxKind.SEMICOLON_TOKEN; break; case BINARY_OPERATOR: hasMatch = isBinaryOperator(nextToken); break; case COMMA: hasMatch = nextToken.kind == SyntaxKind.COMMA_TOKEN; break; case CLOSED_RECORD_BODY_END: hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN; break; case CLOSED_RECORD_BODY_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN; break; case ELLIPSIS: hasMatch = nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN; break; case QUESTION_MARK: hasMatch = nextToken.kind == SyntaxKind.QUESTION_MARK_TOKEN; break; case RECORD_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RECORD_KEYWORD; break; case TYPE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TYPE_KEYWORD; break; case ARG_LIST_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN; break; case ARG_LIST_END: hasMatch = nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN; break; case OBJECT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.OBJECT_KEYWORD; break; case OBJECT_TYPE_FIRST_QUALIFIER: case OBJECT_TYPE_SECOND_QUALIFIER: if (currentDepth == 0) { hasMatch = false; break; } hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD || nextToken.kind == SyntaxKind.CLIENT_KEYWORD; break; case ABSTRACT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD; break; case CLIENT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CLIENT_KEYWORD; break; case OPEN_BRACKET: case TUPLE_TYPE_DESC_START: hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN; break; case CLOSE_BRACKET: hasMatch = 
nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN; break; case DOT: hasMatch = nextToken.kind == SyntaxKind.DOT_TOKEN; break; case IF_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IF_KEYWORD; break; case ELSE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ELSE_KEYWORD; break; case WHILE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WHILE_KEYWORD; break; case PANIC_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.PANIC_KEYWORD; break; case AS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.AS_KEYWORD; break; case LOCK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.LOCK_KEYWORD; break; case BOOLEAN_LITERAL: hasMatch = nextToken.kind == SyntaxKind.TRUE_KEYWORD || nextToken.kind == SyntaxKind.FALSE_KEYWORD; break; case DECIMAL_INTEGER_LITERAL: case MAJOR_VERSION: case MINOR_VERSION: case PATCH_VERSION: hasMatch = nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL; break; case IMPORT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IMPORT_KEYWORD; break; case SLASH: hasMatch = nextToken.kind == SyntaxKind.SLASH_TOKEN; break; case VERSION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.VERSION_KEYWORD; break; case CONTINUE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CONTINUE_KEYWORD; break; case BREAK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.BREAK_KEYWORD; break; case RETURN_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RETURN_KEYWORD; break; case BASIC_LITERAL: hasMatch = isBasicLiteral(nextToken.kind); break; case COLON: hasMatch = nextToken.kind == SyntaxKind.COLON_TOKEN; break; case STRING_LITERAL: hasMatch = nextToken.kind == SyntaxKind.STRING_LITERAL; break; case SERVICE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.SERVICE_KEYWORD; break; case ON_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ON_KEYWORD; break; case RESOURCE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RESOURCE_KEYWORD; break; case LISTENER_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.LISTENER_KEYWORD; break; case CONST_KEYWORD: hasMatch = nextToken.kind 
== SyntaxKind.CONST_KEYWORD; break; case FINAL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FINAL_KEYWORD; break; case TYPEOF_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TYPEOF_KEYWORD; break; case UNARY_OPERATOR: hasMatch = isUnaryOperator(nextToken); break; case HEX_INTEGER_LITERAL: hasMatch = nextToken.kind == SyntaxKind.HEX_INTEGER_LITERAL; break; case AT: hasMatch = nextToken.kind == SyntaxKind.AT_TOKEN; break; case IS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IS_KEYWORD; break; case RIGHT_ARROW: hasMatch = nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN; break; case PARAMETERIZED_TYPE: hasMatch = isParameterizedTypeToken(nextToken.kind); break; case LT: hasMatch = nextToken.kind == SyntaxKind.LT_TOKEN; break; case GT: hasMatch = nextToken.kind == SyntaxKind.GT_TOKEN; break; case NULL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.NULL_KEYWORD; break; case ANNOTATION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ANNOTATION_KEYWORD; break; case FIELD_IDENT: hasMatch = nextToken.kind == SyntaxKind.FIELD_KEYWORD; break; case FUNCTION_IDENT: hasMatch = nextToken.kind == SyntaxKind.FUNCTION_KEYWORD; break; case IDENT_AFTER_OBJECT_IDENT: hasMatch = nextToken.kind == SyntaxKind.TYPE_KEYWORD || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD || nextToken.kind == SyntaxKind.FIELD_KEYWORD; break; case SOURCE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.SOURCE_KEYWORD; break; case SINGLE_KEYWORD_ATTACH_POINT_IDENT: hasMatch = isSingleKeywordAttachPointIdent(nextToken.kind); break; case OBJECT_IDENT: hasMatch = nextToken.kind == SyntaxKind.OBJECT_KEYWORD; break; case RECORD_IDENT: hasMatch = nextToken.kind == SyntaxKind.RECORD_KEYWORD; break; case RESOURCE_IDENT: hasMatch = nextToken.kind == SyntaxKind.RESOURCE_KEYWORD; break; case XMLNS_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.XMLNS_KEYWORD; break; case WORKER_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WORKER_KEYWORD; break; case FORK_KEYWORD: hasMatch = nextToken.kind == 
SyntaxKind.FORK_KEYWORD; break; case DECIMAL_FLOATING_POINT_LITERAL: hasMatch = nextToken.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL; break; case HEX_FLOATING_POINT_LITERAL: hasMatch = nextToken.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL; break; case TRAP_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TRAP_KEYWORD; break; case FOREACH_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FOREACH_KEYWORD; break; case IN_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.IN_KEYWORD; break; case PIPE: hasMatch = nextToken.kind == SyntaxKind.PIPE_TOKEN; break; case TABLE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TABLE_KEYWORD; break; case KEY_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.KEY_KEYWORD; break; case ERROR_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ERROR_KEYWORD; break; case LET_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.LET_KEYWORD; break; case STREAM_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.STREAM_KEYWORD; break; case TEMPLATE_START: case TEMPLATE_END: hasMatch = nextToken.kind == SyntaxKind.BACKTICK_TOKEN; break; case XML_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.XML_KEYWORD; break; case STRING_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.STRING_KEYWORD; break; case BASE16_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.BASE16_KEYWORD; break; case BASE64_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.BASE64_KEYWORD; break; case ASTERISK: case INFERRED_TYPE_DESC: hasMatch = nextToken.kind == SyntaxKind.ASTERISK_TOKEN; break; case NEW_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.NEW_KEYWORD; break; case SELECT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.SELECT_KEYWORD; break; case WHERE_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WHERE_KEYWORD; break; case FROM_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FROM_KEYWORD; break; case BITWISE_AND_OPERATOR: hasMatch = nextToken.kind == SyntaxKind.BITWISE_AND_TOKEN; break; case EXPR_FUNC_BODY_START: hasMatch = nextToken.kind == 
SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN; break; case START_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.START_KEYWORD; break; case FLUSH_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.FLUSH_KEYWORD; break; case DEFAULT_KEYWORD: case DEFAULT_WORKER_NAME_IN_ASYNC_SEND: hasMatch = nextToken.kind == SyntaxKind.DEFAULT_KEYWORD; break; case PLUS_TOKEN: hasMatch = nextToken.kind == SyntaxKind.PLUS_TOKEN; break; case MINUS_TOKEN: hasMatch = nextToken.kind == SyntaxKind.MINUS_TOKEN; break; case SIGNED_INT_OR_FLOAT_RHS: hasMatch = BallerinaParser.isIntOrFloat(nextToken); break; case SYNC_SEND_TOKEN: hasMatch = nextToken.kind == SyntaxKind.SYNC_SEND_TOKEN; break; case PEER_WORKER_NAME: hasMatch = nextToken.kind == SyntaxKind.DEFAULT_KEYWORD || nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN; break; case LEFT_ARROW_TOKEN: hasMatch = nextToken.kind == SyntaxKind.LEFT_ARROW_TOKEN; break; case WAIT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.WAIT_KEYWORD; break; case CHECKING_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.CHECK_KEYWORD || nextToken.kind == SyntaxKind.CHECKPANIC_KEYWORD; break; case DO_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.DO_KEYWORD; break; case ANNOT_CHAINING_TOKEN: hasMatch = nextToken.kind == SyntaxKind.ANNOT_CHAINING_TOKEN; break; case OPTIONAL_CHAINING_TOKEN: hasMatch = nextToken.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN; break; case TRANSACTION_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TRANSACTION_KEYWORD; break; case COMMIT_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.COMMIT_KEYWORD; break; case RETRY_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.RETRY_KEYWORD; break; case ROLLBACK_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ROLLBACK_KEYWORD; break; case TRANSACTIONAL_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.TRANSACTIONAL_KEYWORD; break; case ENUM_KEYWORD: hasMatch = nextToken.kind == SyntaxKind.ENUM_KEYWORD; break; case MODULE_ENUM_NAME: case ENUM_MEMBER_NAME: hasMatch = nextToken.kind == 
SyntaxKind.IDENTIFIER_TOKEN;
                    break;
                case UNION_OR_INTERSECTION_TOKEN:
                    hasMatch = nextToken.kind == SyntaxKind.PIPE_TOKEN || nextToken.kind == SyntaxKind.BITWISE_AND_TOKEN;
                    break;
                // Non-terminal contexts fall through to the default branch below.
                case VARIABLE_REF:
                case TYPE_REFERENCE:
                case ANNOT_REFERENCE:
                case FIELD_ACCESS_IDENTIFIER:
                case TYPE_DESC_IN_ANNOTATION_DECL:
                case TYPE_DESC_BEFORE_IDENTIFIER:
                case TYPE_DESC_IN_RECORD_FIELD:
                case TYPE_DESC_IN_PARAM:
                case TYPE_DESC_IN_TYPE_BINDING_PATTERN:
                case TYPE_DESC_IN_TYPE_DEF:
                case TYPE_DESC_IN_ANGLE_BRACKETS:
                case TYPE_DESC_IN_RETURN_TYPE_DESC:
                case TYPE_DESC_IN_EXPRESSION:
                case TYPE_DESC_IN_STREAM_TYPE_DESC:
                case TYPE_DESC_IN_PARENTHESIS:
                case TYPE_DESC_IN_NEW_EXPR:
                default:
                    // Contexts that can be matched by more than one grammar rule are
                    // recursively explored; everything else consumes no token and is skipped.
                    if (hasAlternativePaths(currentCtx)) {
                        return seekMatchInAlternativePaths(currentCtx, lookahead, currentDepth, matchingRulesCount,
                                isEntryPoint);
                    }
                    skipRule = true;
                    hasMatch = true;
                    break;
            }

            if (!hasMatch) {
                return fixAndContinue(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint);
            }

            currentCtx = getNextRule(currentCtx, lookahead + 1);
            if (!skipRule) {
                // A token was successfully matched: advance depth, match count and lookahead.
                currentDepth++;
                matchingRulesCount++;
                lookahead++;
                isEntryPoint = false;
            }
        }

        Result result = new Result(new ArrayDeque<>(), matchingRulesCount);
        result.solution = new Solution(Action.KEEP, currentCtx, SyntaxKind.NONE, currentCtx.toString());
        return result;
    }

    /**
     * Check whether the given parser context can be reached via more than one alternative
     * grammar path, i.e. whether it must be resolved via {@code seekMatchInAlternativePaths}.
     * NOTE(review): this case list must stay in sync with the switch inside
     * {@code seekMatchInAlternativePaths} — verify when adding a new context.
     *
     * @param currentCtx Current parser context
     * @return {@code true} if the context has alternative paths, {@code false} otherwise
     */
    private boolean hasAlternativePaths(ParserRuleContext currentCtx) {
        switch (currentCtx) {
            case TOP_LEVEL_NODE:
            case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
            case TOP_LEVEL_NODE_WITHOUT_METADATA:
            case FUNC_OPTIONAL_RETURNS:
            case FUNC_BODY_OR_TYPE_DESC_RHS:
            case ANON_FUNC_BODY:
            case FUNC_BODY:
            case OBJECT_FUNC_BODY:
            case EXPRESSION:
            case TERMINAL_EXPRESSION:
            case VAR_DECL_STMT_RHS:
            case EXPRESSION_RHS:
            case STATEMENT:
            case STATEMENT_WITHOUT_ANNOTS:
            case PARAM_LIST:
            case REQUIRED_PARAM_NAME_RHS:
            case STATEMENT_START_IDENTIFIER:
            case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
            case FIELD_DESCRIPTOR_RHS:
            case FIELD_OR_REST_DESCIPTOR_RHS:
            case RECORD_BODY_END:
            case RECORD_BODY_START:
            case TYPE_DESCRIPTOR:
            case RECORD_FIELD_OR_RECORD_END:
            case RECORD_FIELD_START:
            case RECORD_FIELD_WITHOUT_METADATA:
            case ARG_START:
            case ARG_START_OR_ARG_LIST_END:
            case NAMED_OR_POSITIONAL_ARG_RHS:
            case ARG_END:
            case OBJECT_MEMBER_START:
            case OBJECT_MEMBER_WITHOUT_METADATA:
            case OBJECT_FIELD_RHS:
            case OBJECT_METHOD_START:
            case OBJECT_FUNC_OR_FIELD:
            case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
            case OBJECT_TYPE_DESCRIPTOR_START:
            case ELSE_BLOCK:
            case ELSE_BODY:
            case CALL_STMT_START:
            case IMPORT_PREFIX_DECL:
            case IMPORT_VERSION_DECL:
            case IMPORT_DECL_RHS:
            case AFTER_IMPORT_MODULE_NAME:
            case MAJOR_MINOR_VERSION_END:
            case RETURN_STMT_RHS:
            case ACCESS_EXPRESSION:
            case FIRST_MAPPING_FIELD:
            case MAPPING_FIELD:
            case SPECIFIC_FIELD_RHS:
            case MAPPING_FIELD_END:
            case OPTIONAL_SERVICE_NAME:
            case RESOURCE_DEF:
            case CONST_DECL_TYPE:
            case CONST_DECL_RHS:
            case ARRAY_LENGTH:
            case PARAMETER_START:
            case PARAMETER_WITHOUT_ANNOTS:
            case STMT_START_WITH_EXPR_RHS:
            case EXPRESSION_STATEMENT_START:
            case ANNOT_DECL_OPTIONAL_TYPE:
            case ANNOT_DECL_RHS:
            case ANNOT_OPTIONAL_ATTACH_POINTS:
            case ATTACH_POINT:
            case ATTACH_POINT_IDENT:
            case ATTACH_POINT_END:
            case XML_NAMESPACE_PREFIX_DECL:
            case CONSTANT_EXPRESSION_START:
            case TYPEDESC_RHS:
            case LIST_CONSTRUCTOR_FIRST_MEMBER:
            case TYPE_CAST_PARAM:
            case TYPE_CAST_PARAM_RHS:
            case TABLE_KEYWORD_RHS:
            case ROW_LIST_RHS:
            case TABLE_ROW_END:
            case KEY_SPECIFIER_RHS:
            case TABLE_KEY_RHS:
            case ERROR_TYPE_PARAMS:
            case LET_VAR_DECL_START:
            case STREAM_TYPE_FIRST_PARAM_RHS:
            case TEMPLATE_MEMBER:
            case TEMPLATE_STRING_RHS:
            case FUNCTION_KEYWORD_RHS:
            case WORKER_NAME_RHS:
            case BINDING_PATTERN:
            case LIST_BINDING_PATTERN_END_OR_CONTINUE:
            case FIELD_BINDING_PATTERN_END:
            case LIST_BINDING_PATTERN_CONTENTS:
            case MAPPING_BINDING_PATTERN_END:
            case MAPPING_BINDING_PATTERN_MEMBER:
            case KEY_CONSTRAINTS_RHS:
            case TABLE_TYPE_DESC_RHS:
            case NEW_KEYWORD_RHS:
            case TABLE_CONSTRUCTOR_OR_QUERY_START:
            case TABLE_CONSTRUCTOR_OR_QUERY_RHS:
            case QUERY_PIPELINE_RHS:
            case BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS:
            case ANON_FUNC_PARAM_RHS:
            case PARAM_END:
            case ANNOTATION_REF_RHS:
            case INFER_PARAM_END_OR_PARENTHESIS_END:
            case TYPE_DESC_IN_TUPLE_RHS:
            case LIST_CONSTRUCTOR_MEMBER_END:
            case NIL_OR_PARENTHESISED_TYPE_DESC_RHS:
            case REMOTE_CALL_OR_ASYNC_SEND_RHS:
            case REMOTE_CALL_OR_ASYNC_SEND_END:
            case RECEIVE_WORKERS:
            case RECEIVE_FIELD:
            case RECEIVE_FIELD_END:
            case WAIT_KEYWORD_RHS:
            case WAIT_FIELD_NAME_RHS:
            case WAIT_FIELD_END:
            case WAIT_FUTURE_EXPR_END:
            case OPTIONAL_PEER_WORKER:
            case ENUM_MEMBER_START:
            case ENUM_MEMBER_INTERNAL_RHS:
            case ENUM_MEMBER_RHS:
            case MEMBER_ACCESS_KEY_EXPR_END:
            case ROLLBACK_RHS:
            case RETRY_KEYWORD_RHS:
            case RETRY_TYPE_PARAM_RHS:
            case RETRY_BODY:
            case LIST_BP_OR_TUPLE_TYPE_MEMBER:
            case LIST_BP_OR_TUPLE_TYPE_DESC_RHS:
            case STMT_START_IDENTIFIER_RHS:
            case BRACKETED_LIST_RHS:
            case BRACKETED_LIST_MEMBER:
            case BRACKETED_LIST_MEMBER_END:
            case AMBIGUOUS_STMT:
            case LIST_BINDING_MEMBER_OR_ARRAY_LENGTH:
                return true;
            default:
                return false;
        }
    }

    /**
     * Resolve a context that has alternative grammar paths by searching every alternative
     * and returning the most optimal recovery result.
     */
    private Result seekMatchInAlternativePaths(ParserRuleContext currentCtx, int lookahead, int currentDepth,
                                               int matchingRulesCount, boolean isEntryPoint) {
        ParserRuleContext[] alternativeRules;
        switch (currentCtx) {
            case TOP_LEVEL_NODE:
                alternativeRules = TOP_LEVEL_NODE;
                break;
            case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
                alternativeRules = TOP_LEVEL_NODE_WITHOUT_MODIFIER;
                break;
            case TOP_LEVEL_NODE_WITHOUT_METADATA:
                alternativeRules = TOP_LEVEL_NODE_WITHOUT_METADATA;
                break;
            case FUNC_OPTIONAL_RETURNS:
                // The allowed return-type alternatives depend on what kind of function
                // construct is currently being parsed.
                ParserRuleContext parentCtx = getParentContext();
                ParserRuleContext[] alternatives;
                if (parentCtx == ParserRuleContext.FUNC_DEF) {
                    alternatives = FUNC_DEF_OPTIONAL_RETURNS;
                } else if (parentCtx == ParserRuleContext.ANON_FUNC_EXPRESSION) {
                    alternatives = ANNON_FUNC_OPTIONAL_RETURNS;
                } else if (parentCtx == ParserRuleContext.FUNC_TYPE_DESC) {
                    alternatives = FUNC_TYPE_OPTIONAL_RETURNS;
                } else {
                    alternatives = FUNC_TYPE_OR_DEF_OPTIONAL_RETURNS;
                }
                alternativeRules = alternatives;
                break;
            case FUNC_BODY_OR_TYPE_DESC_RHS:
                alternativeRules =
FUNC_BODY_OR_TYPE_DESC_RHS;
                break;
            case ANON_FUNC_BODY: alternativeRules = ANON_FUNC_BODY; break;
            case FUNC_BODY:
            case OBJECT_FUNC_BODY:
                // Object methods cannot have external bodies; pick the rule set accordingly.
                if (getGrandParentContext() == ParserRuleContext.OBJECT_MEMBER) {
                    alternativeRules = OBJECT_FUNC_BODY;
                } else {
                    alternativeRules = FUNC_BODY;
                }
                break;
            case EXPRESSION:
            case TERMINAL_EXPRESSION: alternativeRules = EXPRESSION_START; break;
            case VAR_DECL_STMT_RHS: alternativeRules = VAR_DECL_RHS; break;
            // Some contexts need custom logic rather than a plain alternative array.
            case EXPRESSION_RHS:
                return seekMatchInExpressionRhs(lookahead, currentDepth, matchingRulesCount, isEntryPoint);
            case STATEMENT:
            case STATEMENT_WITHOUT_ANNOTS:
                return seekInStatements(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint);
            case PARAM_LIST: alternativeRules = PARAM_LIST; break;
            case REQUIRED_PARAM_NAME_RHS: alternativeRules = REQUIRED_PARAM_NAME_RHS; break;
            case STATEMENT_START_IDENTIFIER: alternativeRules = TYPE_OR_VAR_NAME; break;
            case ASSIGNMENT_OR_VAR_DECL_STMT_RHS: alternativeRules = ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN; break;
            case FIELD_DESCRIPTOR_RHS: alternativeRules = FIELD_DESCRIPTOR_RHS; break;
            case FIELD_OR_REST_DESCIPTOR_RHS: alternativeRules = FIELD_OR_REST_DESCIPTOR_RHS; break;
            case RECORD_BODY_END: alternativeRules = RECORD_BODY_END; break;
            case RECORD_BODY_START: alternativeRules = RECORD_BODY_START; break;
            case TYPE_DESCRIPTOR: alternativeRules = TYPE_DESCRIPTORS; break;
            case RECORD_FIELD_OR_RECORD_END: alternativeRules = RECORD_FIELD_OR_RECORD_END; break;
            case RECORD_FIELD_START: alternativeRules = RECORD_FIELD_START; break;
            case RECORD_FIELD_WITHOUT_METADATA: alternativeRules = RECORD_FIELD_WITHOUT_METADATA; break;
            case ARG_START: alternativeRules = ARG_START; break;
            case ARG_START_OR_ARG_LIST_END: alternativeRules = ARG_START_OR_ARG_LIST_END; break;
            case NAMED_OR_POSITIONAL_ARG_RHS: alternativeRules = NAMED_OR_POSITIONAL_ARG_RHS; break;
            case ARG_END: alternativeRules = ARG_END; break;
            case OBJECT_MEMBER_START: alternativeRules = OBJECT_MEMBER_START; break;
            case OBJECT_MEMBER_WITHOUT_METADATA: alternativeRules = OBJECT_MEMBER_WITHOUT_METADATA; break;
            case OBJECT_FIELD_RHS: alternativeRules = OBJECT_FIELD_RHS; break;
            case OBJECT_METHOD_START: alternativeRules = OBJECT_METHOD_START; break;
            case OBJECT_FUNC_OR_FIELD: alternativeRules = OBJECT_FUNC_OR_FIELD; break;
            case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY: alternativeRules = OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY; break;
            case OBJECT_TYPE_DESCRIPTOR_START: alternativeRules = OBJECT_TYPE_DESCRIPTOR_START; break;
            case ELSE_BLOCK: alternativeRules = ELSE_BLOCK; break;
            case ELSE_BODY: alternativeRules = ELSE_BODY; break;
            case CALL_STMT_START: alternativeRules = CALL_STATEMENT; break;
            case IMPORT_PREFIX_DECL: alternativeRules = IMPORT_PREFIX_DECL; break;
            case IMPORT_VERSION_DECL: alternativeRules = IMPORT_VERSION; break;
            case IMPORT_DECL_RHS: alternativeRules = IMPORT_DECL_RHS; break;
            case AFTER_IMPORT_MODULE_NAME: alternativeRules = AFTER_IMPORT_MODULE_NAME; break;
            case MAJOR_MINOR_VERSION_END: alternativeRules = MAJOR_MINOR_VERSION_END; break;
            case RETURN_STMT_RHS: alternativeRules = RETURN_RHS; break;
            case ACCESS_EXPRESSION:
                return seekInAccessExpression(currentCtx, lookahead, currentDepth, matchingRulesCount, isEntryPoint);
            case FIRST_MAPPING_FIELD: alternativeRules = FIRST_MAPPING_FIELD_START; break;
            case MAPPING_FIELD: alternativeRules = MAPPING_FIELD_START; break;
            case SPECIFIC_FIELD_RHS: alternativeRules = SPECIFIC_FIELD_RHS; break;
            case MAPPING_FIELD_END: alternativeRules = MAPPING_FIELD_END; break;
            case OPTIONAL_SERVICE_NAME: alternativeRules = OPTIONAL_SERVICE_NAME; break;
            case RESOURCE_DEF: alternativeRules = RESOURCE_DEF_START; break;
            case CONST_DECL_TYPE: alternativeRules = CONST_DECL_TYPE; break;
            case CONST_DECL_RHS: alternativeRules = CONST_DECL_RHS; break;
            case ARRAY_LENGTH: alternativeRules = ARRAY_LENGTH; break;
            case PARAMETER_START: alternativeRules = PARAMETER_START; break;
            case PARAMETER_WITHOUT_ANNOTS: alternativeRules = PARAMETER_WITHOUT_ANNOTS; break;
            case STMT_START_WITH_EXPR_RHS: alternativeRules = STMT_START_WITH_EXPR_RHS; break;
            case EXPRESSION_STATEMENT_START: alternativeRules = EXPRESSION_STATEMENT_START; break;
            case ANNOT_DECL_OPTIONAL_TYPE: alternativeRules = ANNOT_DECL_OPTIONAL_TYPE; break;
            case ANNOT_DECL_RHS: alternativeRules = ANNOT_DECL_RHS; break;
            case ANNOT_OPTIONAL_ATTACH_POINTS: alternativeRules = ANNOT_OPTIONAL_ATTACH_POINTS; break;
            case ATTACH_POINT: alternativeRules = ATTACH_POINT; break;
            case ATTACH_POINT_IDENT: alternativeRules = ATTACH_POINT_IDENT; break;
            case ATTACH_POINT_END: alternativeRules = ATTACH_POINT_END; break;
            case XML_NAMESPACE_PREFIX_DECL: alternativeRules = XML_NAMESPACE_PREFIX_DECL; break;
            case CONSTANT_EXPRESSION_START: alternativeRules = CONSTANT_EXPRESSION; break;
            case TYPEDESC_RHS: alternativeRules = TYPEDESC_RHS; break;
            case LIST_CONSTRUCTOR_FIRST_MEMBER: alternativeRules = LIST_CONSTRUCTOR_RHS; break;
            case TYPE_CAST_PARAM: alternativeRules = TYPE_CAST_PARAM; break;
            case TYPE_CAST_PARAM_RHS: alternativeRules = TYPE_CAST_PARAM_RHS; break;
            case TABLE_KEYWORD_RHS: alternativeRules = TABLE_KEYWORD_RHS; break;
            case ROW_LIST_RHS: alternativeRules = ROW_LIST_RHS; break;
            case TABLE_ROW_END: alternativeRules = TABLE_ROW_END; break;
            case KEY_SPECIFIER_RHS: alternativeRules = KEY_SPECIFIER_RHS; break;
            case TABLE_KEY_RHS: alternativeRules = TABLE_KEY_RHS; break;
            case ERROR_TYPE_PARAMS: alternativeRules = ERROR_TYPE_PARAMS; break;
            case LET_VAR_DECL_START: alternativeRules = LET_VAR_DECL_START; break;
            case STREAM_TYPE_FIRST_PARAM_RHS: alternativeRules = STREAM_TYPE_FIRST_PARAM_RHS; break;
            case TEMPLATE_MEMBER: alternativeRules = TEMPLATE_MEMBER; break;
            case TEMPLATE_STRING_RHS: alternativeRules = TEMPLATE_STRING_RHS; break;
            case FUNCTION_KEYWORD_RHS: alternativeRules = FUNCTION_KEYWORD_RHS; break;
            case WORKER_NAME_RHS: alternativeRules = WORKER_NAME_RHS; break;
            case BINDING_PATTERN: alternativeRules = BINDING_PATTERN; break;
            case LIST_BINDING_PATTERN_END_OR_CONTINUE: alternativeRules = LIST_BINDING_PATTERN_END_OR_CONTINUE; break;
            case LIST_BINDING_PATTERN_CONTENTS: alternativeRules = LIST_BINDING_PATTERN_CONTENTS; break;
            case MAPPING_BINDING_PATTERN_END: alternativeRules = MAPPING_BINDING_PATTERN_END; break;
            case FIELD_BINDING_PATTERN_END: alternativeRules = FIELD_BINDING_PATTERN_END; break;
            case MAPPING_BINDING_PATTERN_MEMBER: alternativeRules = MAPPING_BINDING_PATTERN_MEMBER; break;
            case KEY_CONSTRAINTS_RHS: alternativeRules = KEY_CONSTRAINTS_RHS; break;
            case TABLE_TYPE_DESC_RHS: alternativeRules = TABLE_TYPE_DESC_RHS; break;
            case NEW_KEYWORD_RHS: alternativeRules = NEW_KEYWORD_RHS; break;
            case TABLE_CONSTRUCTOR_OR_QUERY_START: alternativeRules = TABLE_CONSTRUCTOR_OR_QUERY_START; break;
            case TABLE_CONSTRUCTOR_OR_QUERY_RHS: alternativeRules = TABLE_CONSTRUCTOR_OR_QUERY_RHS; break;
            case QUERY_PIPELINE_RHS: alternativeRules = QUERY_EXPRESSION_RHS; break;
            case BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS:
            case ANON_FUNC_PARAM_RHS: alternativeRules = BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS; break;
            case PARAM_END: alternativeRules = PARAM_END; break;
            case ANNOTATION_REF_RHS: alternativeRules = ANNOTATION_REF_RHS; break;
            case INFER_PARAM_END_OR_PARENTHESIS_END: alternativeRules = INFER_PARAM_END_OR_PARENTHESIS_END; break;
            case TYPE_DESC_IN_TUPLE_RHS: alternativeRules = TYPE_DESC_IN_TUPLE_RHS; break;
            case LIST_CONSTRUCTOR_MEMBER_END: alternativeRules = LIST_CONSTRUCTOR_MEMBER_END; break;
            case NIL_OR_PARENTHESISED_TYPE_DESC_RHS: alternativeRules = NIL_OR_PARENTHESISED_TYPE_DESC_RHS; break;
            case REMOTE_CALL_OR_ASYNC_SEND_RHS: alternativeRules = REMOTE_CALL_OR_ASYNC_SEND_RHS; break;
            case REMOTE_CALL_OR_ASYNC_SEND_END: alternativeRules = REMOTE_CALL_OR_ASYNC_SEND_END; break;
            case RECEIVE_WORKERS: alternativeRules = RECEIVE_WORKERS; break;
            case RECEIVE_FIELD: alternativeRules = RECEIVE_FIELD; break;
            case RECEIVE_FIELD_END: alternativeRules = RECEIVE_FIELD_END; break;
            case WAIT_KEYWORD_RHS: alternativeRules = WAIT_KEYWORD_RHS; break;
            case WAIT_FIELD_NAME_RHS: alternativeRules = WAIT_FIELD_NAME_RHS; break;
            case WAIT_FIELD_END: alternativeRules = WAIT_FIELD_END; break;
            case WAIT_FUTURE_EXPR_END: alternativeRules = WAIT_FUTURE_EXPR_END; break;
            case OPTIONAL_PEER_WORKER: alternativeRules = OPTIONAL_PEER_WORKER; break;
            case ENUM_MEMBER_START: alternativeRules = ENUM_MEMBER_START; break;
            case ENUM_MEMBER_INTERNAL_RHS: alternativeRules = ENUM_MEMBER_INTERNAL_RHS; break;
            case ENUM_MEMBER_RHS: alternativeRules = ENUM_MEMBER_RHS; break;
            case MEMBER_ACCESS_KEY_EXPR_END: alternativeRules = MEMBER_ACCESS_KEY_EXPR_END; break;
            case ROLLBACK_RHS: alternativeRules = ROLLBACK_RHS; break;
            case RETRY_KEYWORD_RHS: alternativeRules = RETRY_KEYWORD_RHS; break;
            case RETRY_TYPE_PARAM_RHS: alternativeRules = RETRY_TYPE_PARAM_RHS; break;
            case RETRY_BODY: alternativeRules = RETRY_BODY; break;
            case LIST_BP_OR_TUPLE_TYPE_MEMBER: alternativeRules = LIST_BP_OR_TUPLE_TYPE_MEMBER; break;
            case LIST_BP_OR_TUPLE_TYPE_DESC_RHS: alternativeRules = LIST_BP_OR_TUPLE_TYPE_DESC_RHS; break;
            case BRACKETED_LIST_MEMBER_END: alternativeRules = BRACKETED_LIST_MEMBER_END; break;
            case BRACKETED_LIST_MEMBER: alternativeRules = BRACKETED_LIST_MEMBER; break;
            case BRACKETED_LIST_RHS:
            case STMT_START_IDENTIFIER_RHS: alternativeRules = BRACKETED_LIST_RHS; break;
            case LIST_BINDING_MEMBER_OR_ARRAY_LENGTH: alternativeRules = LIST_BINDING_MEMBER_OR_ARRAY_LENGTH; break;
            default:
                // NOTE(review): hasAlternativePaths() returns true for AMBIGUOUS_STMT, but there
                // is no matching arm here, so that context would throw — confirm this is intended
                // or add the missing case.
                throw new IllegalStateException(currentCtx.toString());
        }
        return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, alternativeRules, isEntryPoint);
    }

    /**
     * Search for matching token sequences within different kinds of statements and returns the most optimal solution.
* * @param currentCtx Current context * @param lookahead Position of the next token to consider, relative to the position of the original error * @param currentDepth Amount of distance traveled so far * @param currentMatches Matching tokens found so far * @param fixes Fixes made so far * @return Recovery result */ private Result seekInStatements(ParserRuleContext currentCtx, int lookahead, int currentDepth, int currentMatches, boolean isEntryPoint) { STToken nextToken = this.tokenReader.peek(lookahead);; if (nextToken.kind == SyntaxKind.SEMICOLON_TOKEN) { Result result = seekMatchInSubTree(ParserRuleContext.STATEMENT, lookahead + 1, currentDepth, isEntryPoint); result.fixes.push(new Solution(Action.REMOVE, currentCtx, nextToken.kind, nextToken.toString())); return getFinalResult(currentMatches, result); } return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, STATEMENTS, isEntryPoint); } /** * Search for matching token sequences within access expressions and returns the most optimal solution. * Access expression can be one of: method-call, field-access, member-access. 
 *
 * @param currentCtx Current context
 * @param lookahead Position of the next token to consider, relative to the position of the original error
 * @param currentDepth Amount of distance traveled so far
 * @param currentMatches Matching tokens found so far
 * @param isEntryPoint Whether this is the entry point to the error recovery
 * @return Recovery result
 */
    private Result seekInAccessExpression(ParserRuleContext currentCtx, int lookahead, int currentDepth,
                                          int currentMatches, boolean isEntryPoint) {
        STToken nextToken = this.tokenReader.peek(lookahead);
        currentDepth++;
        // Every access expression starts with an identifier; anything else needs a fix first.
        if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
            Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth);
            return getFinalResult(currentMatches, fixedPathResult);
        }

        // Peek one token past the identifier to decide which flavor follows:
        // '(' -> method call, '.' -> field access, '[' -> member access;
        // otherwise fall back to the generic expression-continuation rule.
        ParserRuleContext nextContext;
        STToken nextNextToken = this.tokenReader.peek(lookahead + 1);
        switch (nextNextToken.kind) {
            case OPEN_PAREN_TOKEN:
                nextContext = ParserRuleContext.OPEN_PARENTHESIS;
                break;
            case DOT_TOKEN:
                nextContext = ParserRuleContext.DOT;
                break;
            case OPEN_BRACKET_TOKEN:
                nextContext = ParserRuleContext.MEMBER_ACCESS_KEY_EXPR;
                break;
            default:
                nextContext = getNextRuleForExpr();
                break;
        }

        // The identifier itself counts as a match; continue seeking from the chosen context.
        currentMatches++;
        lookahead++;
        Result result = seekMatch(nextContext, lookahead, currentDepth, isEntryPoint);
        return getFinalResult(currentMatches, result);
    }

    /**
     * Search for a match in rhs of an expression. RHS of an expression can be the end
     * of the expression or the rhs of a binary expression.
 *
 * @param lookahead Position of the next token to consider, relative to the position of the original error
 * @param currentDepth Amount of distance traveled so far
 * @param currentMatches Matching tokens found so far
 * @param isEntryPoint Whether this is the entry point to the error recovery
 * @return Recovery result
 */
    private Result seekMatchInExpressionRhs(int lookahead, int currentDepth, int currentMatches, boolean isEntryPoint) {
        // What may legally follow an expression depends on the construct that encloses it,
        // so build the alternative set per parent context.
        ParserRuleContext parentCtx = getParentContext();
        ParserRuleContext[] next;
        switch (parentCtx) {
            case ARG_LIST:
                next = new ParserRuleContext[] { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
                        ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN,
                        ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR,
                        ParserRuleContext.COMMA, ParserRuleContext.ARG_LIST_START, ParserRuleContext.ARG_LIST_END };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case MAPPING_CONSTRUCTOR:
            case MULTI_WAIT_FIELDS:
                next = new ParserRuleContext[] { ParserRuleContext.CLOSE_BRACE, ParserRuleContext.BINARY_OPERATOR,
                        ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                        ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                        ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.COMMA,
                        ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case COMPUTED_FIELD_NAME:
                next = new ParserRuleContext[] { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.BINARY_OPERATOR,
                        ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                        ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                        ParserRuleContext.OPEN_BRACKET, ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case LISTENERS_LIST:
                next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR,
                        ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                        ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                        ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.OPEN_BRACE,
                        ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case LIST_CONSTRUCTOR:
            case MEMBER_ACCESS_KEY_EXPR:
            case BRACKETED_LIST:
                next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR,
                        ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                        ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                        ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.CLOSE_BRACKET,
                        ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case LET_EXPR_LET_VAR_DECL:
                next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR,
                        ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                        ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                        ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.IN_KEYWORD,
                        ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case LET_CLAUSE_LET_VAR_DECL:
                next = new ParserRuleContext[] { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR,
                        ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN,
                        ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION,
                        ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.LET_CLAUSE_END,
                        ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            case QUERY_EXPRESSION:
                next = new ParserRuleContext[] { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
                        ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN,
                        ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR,
                        ParserRuleContext.QUERY_PIPELINE_RHS, ParserRuleContext.ARG_LIST_START };
                return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
            default:
                if (isParameter(parentCtx)) {
                    next = new ParserRuleContext[] { ParserRuleContext.CLOSE_PARENTHESIS,
                            ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
                            ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN,
                            ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR,
                            ParserRuleContext.COMMA, ParserRuleContext.ARG_LIST_START };
                    return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next, isEntryPoint);
                }
                break;
        }

        // No special-cased parent: pick the single token that terminates the expression
        // in this enclosing context, then search the common expression-RHS alternatives.
        ParserRuleContext nextContext;
        if (parentCtx == ParserRuleContext.IF_BLOCK || parentCtx == ParserRuleContext.WHILE_BLOCK ||
                parentCtx == ParserRuleContext.FOREACH_STMT) {
            nextContext = ParserRuleContext.BLOCK_STMT;
        } else if (isStatement(parentCtx) || parentCtx == ParserRuleContext.RECORD_FIELD ||
                parentCtx == ParserRuleContext.OBJECT_MEMBER || parentCtx == ParserRuleContext.LISTENER_DECL ||
                parentCtx == ParserRuleContext.CONSTANT_DECL) {
            nextContext = ParserRuleContext.SEMICOLON;
        } else if (parentCtx == ParserRuleContext.ANNOTATIONS) {
            nextContext = ParserRuleContext.ANNOTATION_END;
        } else if (parentCtx == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) {
            nextContext = ParserRuleContext.CLOSE_BRACKET;
        } else if (parentCtx == ParserRuleContext.INTERPOLATION) {
            nextContext = ParserRuleContext.CLOSE_BRACE;
        } else if (parentCtx == ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS) {
            nextContext = ParserRuleContext.CLOSE_PARENTHESIS;
        } else if (parentCtx == ParserRuleContext.FUNC_DEF) {
            nextContext = ParserRuleContext.SEMICOLON;
        } else if (parentCtx == ParserRuleContext.ALTERNATE_WAIT_EXPRS) {
            nextContext = ParserRuleContext.ALTERNATE_WAIT_EXPR_LIST_END;
        } else if (parentCtx ==
ParserRuleContext.CONDITIONAL_EXPRESSION) { nextContext = ParserRuleContext.COLON; } else if (parentCtx == ParserRuleContext.ENUM_MEMBER_LIST) { nextContext = ParserRuleContext.ENUM_MEMBER_RHS; } else { throw new IllegalStateException(parentCtx.toString()); } ParserRuleContext[] alternatives = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.IS_KEYWORD, ParserRuleContext.DOT, ParserRuleContext.ANNOT_CHAINING_TOKEN, ParserRuleContext.OPTIONAL_CHAINING_TOKEN, ParserRuleContext.CONDITIONAL_EXPRESSION, ParserRuleContext.MEMBER_ACCESS_KEY_EXPR, ParserRuleContext.RIGHT_ARROW, ParserRuleContext.SYNC_SEND_TOKEN, nextContext, ParserRuleContext.ARG_LIST_START }; return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, alternatives, isEntryPoint); } /** * Get the next parser rule/context given the current parser context. * * @param currentCtx Current parser context * @param nextLookahead Position of the next token to consider, relative to the position of the original error * @return Next parser context */ @Override protected ParserRuleContext getNextRule(ParserRuleContext currentCtx, int nextLookahead) { startContextIfRequired(currentCtx); ParserRuleContext parentCtx; STToken nextToken; switch (currentCtx) { case EOF: return ParserRuleContext.EOF; case COMP_UNIT: return ParserRuleContext.TOP_LEVEL_NODE; case PUBLIC_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.OBJECT_TYPE_DESCRIPTOR || parentCtx == ParserRuleContext.OBJECT_MEMBER) { return ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY; } else if (isParameter(parentCtx)) { return ParserRuleContext.TYPE_DESC_IN_PARAM; } return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER; case PRIVATE_KEYWORD: return ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY; case FUNC_DEF: case FUNC_DEF_OR_FUNC_TYPE: case FUNC_TYPE_DESC: case ANON_FUNC_EXPRESSION: return ParserRuleContext.FUNCTION_KEYWORD; case EXTERNAL_FUNC_BODY: return ParserRuleContext.ASSIGN_OP; case 
FUNC_BODY_BLOCK: return ParserRuleContext.OPEN_BRACE; case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: endContext(); return ParserRuleContext.CLOSE_BRACE; case ASSIGN_OP: return getNextRuleForEqualOp(); case COMPOUND_BINARY_OPERATOR: return ParserRuleContext.ASSIGN_OP; case CLOSE_BRACE: return getNextRuleForCloseBrace(nextLookahead); case CLOSE_PARENTHESIS: return getNextRuleForCloseParenthsis(); case EXPRESSION: case BASIC_LITERAL: case TERMINAL_EXPRESSION: return getNextRuleForExpr(); case EXTERNAL_KEYWORD: return ParserRuleContext.SEMICOLON; case FUNCTION_KEYWORD: return ParserRuleContext.FUNCTION_KEYWORD_RHS; case FUNC_NAME: return ParserRuleContext.OPEN_PARENTHESIS; case OPEN_BRACE: return getNextRuleForOpenBrace(nextLookahead); case OPEN_PARENTHESIS: return getNextRuleForOpenParenthesis(); case RETURNS_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC; case SEMICOLON: return getNextRuleForSemicolon(nextLookahead); case SIMPLE_TYPE_DESCRIPTOR: return ParserRuleContext.TYPEDESC_RHS; case VARIABLE_NAME: case PARAMETER_NAME_RHS: return getNextRuleForVarName(); case TOP_LEVEL_NODE_WITHOUT_MODIFIER: return ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE; case REQUIRED_PARAM: case DEFAULTABLE_PARAM: case REST_PARAM: return ParserRuleContext.TYPE_DESC_IN_PARAM; case ASSIGNMENT_STMT: return ParserRuleContext.VARIABLE_NAME; case COMPOUND_ASSIGNMENT_STMT: return ParserRuleContext.VARIABLE_NAME; case VAR_DECL_STMT: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN; case EXPRESSION_RHS: return ParserRuleContext.BINARY_OPERATOR; case BINARY_OPERATOR: return ParserRuleContext.EXPRESSION; case COMMA: return getNextRuleForComma(); case AFTER_PARAMETER_TYPE: return getNextRuleForParamType(); case MODULE_TYPE_DEFINITION: return ParserRuleContext.TYPE_KEYWORD; case CLOSED_RECORD_BODY_END: endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TYPEDESC_RHS; 
case CLOSED_RECORD_BODY_START: return ParserRuleContext.RECORD_FIELD_OR_RECORD_END; case ELLIPSIS: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR || parentCtx == ParserRuleContext.ARG_LIST) { return ParserRuleContext.EXPRESSION; } if (parentCtx == ParserRuleContext.TYPE_DESC_IN_TUPLE || parentCtx == ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_DESC || parentCtx == ParserRuleContext.BRACKETED_LIST) { return ParserRuleContext.CLOSE_BRACKET; } return ParserRuleContext.VARIABLE_NAME; case QUESTION_MARK: return getNextRuleForQuestionMark(); case RECORD_KEYWORD: return ParserRuleContext.RECORD_BODY_START; case TYPE_KEYWORD: return ParserRuleContext.TYPE_NAME; case RECORD_TYPE_DESCRIPTOR: return ParserRuleContext.RECORD_KEYWORD; case ASTERISK: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) { return ParserRuleContext.CLOSE_BRACKET; } return ParserRuleContext.TYPE_REFERENCE; case TYPE_NAME: return ParserRuleContext.TYPE_DESC_IN_TYPE_DEF; case OBJECT_KEYWORD: return ParserRuleContext.OPEN_BRACE; case REMOTE_KEYWORD: return ParserRuleContext.FUNCTION_KEYWORD; case OBJECT_TYPE_DESCRIPTOR: return ParserRuleContext.OBJECT_TYPE_DESCRIPTOR_START; case OBJECT_TYPE_FIRST_QUALIFIER: case OBJECT_TYPE_SECOND_QUALIFIER: return ParserRuleContext.OBJECT_KEYWORD; case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return ParserRuleContext.OBJECT_KEYWORD; case OPEN_BRACKET: return getNextRuleForOpenBracket(); case CLOSE_BRACKET: return getNextRuleForCloseBracket(); case DOT: return getNextRuleForDot(); case IF_KEYWORD: return ParserRuleContext.EXPRESSION; case ELSE_KEYWORD: return ParserRuleContext.ELSE_BODY; case BLOCK_STMT: return ParserRuleContext.OPEN_BRACE; case IF_BLOCK: return ParserRuleContext.IF_KEYWORD; case WHILE_BLOCK: return ParserRuleContext.WHILE_KEYWORD; case WHILE_KEYWORD: return ParserRuleContext.EXPRESSION; case CHECKING_KEYWORD: return ParserRuleContext.EXPRESSION; case CALL_STMT: return 
ParserRuleContext.CALL_STMT_START; case PANIC_STMT: return ParserRuleContext.PANIC_KEYWORD; case PANIC_KEYWORD: return ParserRuleContext.EXPRESSION; case FUNC_CALL: return ParserRuleContext.IMPORT_PREFIX; case IMPORT_KEYWORD: return ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME; case IMPORT_PREFIX: case NAMESPACE_PREFIX: return ParserRuleContext.SEMICOLON; case VERSION_NUMBER: case VERSION_KEYWORD: return ParserRuleContext.MAJOR_VERSION; case SLASH: return ParserRuleContext.IMPORT_MODULE_NAME; case IMPORT_ORG_OR_MODULE_NAME: return ParserRuleContext.IMPORT_DECL_RHS; case IMPORT_MODULE_NAME: return ParserRuleContext.AFTER_IMPORT_MODULE_NAME; case AS_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.IMPORT_DECL) { return ParserRuleContext.IMPORT_PREFIX; } else if (parentCtx == ParserRuleContext.XML_NAMESPACE_DECLARATION) { return ParserRuleContext.NAMESPACE_PREFIX; } throw new IllegalStateException(); case MAJOR_VERSION: case MINOR_VERSION: case IMPORT_SUB_VERSION: return ParserRuleContext.MAJOR_MINOR_VERSION_END; case PATCH_VERSION: return ParserRuleContext.IMPORT_PREFIX_DECL; case IMPORT_DECL: return ParserRuleContext.IMPORT_KEYWORD; case CONTINUE_STATEMENT: return ParserRuleContext.CONTINUE_KEYWORD; case BREAK_STATEMENT: return ParserRuleContext.BREAK_KEYWORD; case CONTINUE_KEYWORD: case BREAK_KEYWORD: return ParserRuleContext.SEMICOLON; case RETURN_STMT: return ParserRuleContext.RETURN_KEYWORD; case RETURN_KEYWORD: return ParserRuleContext.RETURN_STMT_RHS; case ACCESS_EXPRESSION: return ParserRuleContext.VARIABLE_REF; case MAPPING_FIELD_NAME: return ParserRuleContext.SPECIFIC_FIELD_RHS; case COLON: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) { return ParserRuleContext.EXPRESSION; } else if (parentCtx == ParserRuleContext.MULTI_RECEIVE_WORKERS) { return ParserRuleContext.PEER_WORKER_NAME; } else if (parentCtx == ParserRuleContext.MULTI_WAIT_FIELDS) { return ParserRuleContext.EXPRESSION; } else if 
(parentCtx == ParserRuleContext.CONDITIONAL_EXPRESSION) { endContext(); return ParserRuleContext.EXPRESSION; } else if (parentCtx == ParserRuleContext.MAPPING_BINDING_PATTERN) { return ParserRuleContext.VARIABLE_NAME; } else if (parentCtx == ParserRuleContext.FIELD_BINDING_PATTERN) { endContext(); return ParserRuleContext.VARIABLE_NAME; } return ParserRuleContext.IDENTIFIER; case STRING_LITERAL: return ParserRuleContext.COLON; case COMPUTED_FIELD_NAME: return ParserRuleContext.OPEN_BRACKET; case LISTENERS_LIST: return ParserRuleContext.EXPRESSION; case ON_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.ANNOTATION_DECL) { return ParserRuleContext.ANNOT_ATTACH_POINTS_LIST; } return ParserRuleContext.LISTENERS_LIST; case RESOURCE_KEYWORD: return ParserRuleContext.FUNC_DEF; case SERVICE_DECL: return ParserRuleContext.SERVICE_KEYWORD; case SERVICE_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION) { return ParserRuleContext.LISTENERS_LIST; } return ParserRuleContext.OPTIONAL_SERVICE_NAME; case SERVICE_NAME: return ParserRuleContext.ON_KEYWORD; case LISTENER_KEYWORD: return ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER; case LISTENER_DECL: return ParserRuleContext.LISTENER_KEYWORD; case FINAL_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN; case CONSTANT_DECL: return ParserRuleContext.CONST_KEYWORD; case CONST_KEYWORD: return ParserRuleContext.CONST_DECL_TYPE; case CONST_DECL_TYPE: return ParserRuleContext.CONST_DECL_RHS; case NIL_TYPE_DESCRIPTOR: return ParserRuleContext.OPEN_PARENTHESIS; case TYPEOF_EXPRESSION: return ParserRuleContext.TYPEOF_KEYWORD; case TYPEOF_KEYWORD: return ParserRuleContext.EXPRESSION; case OPTIONAL_TYPE_DESCRIPTOR: return ParserRuleContext.QUESTION_MARK; case UNARY_EXPRESSION: return ParserRuleContext.UNARY_OPERATOR; case UNARY_OPERATOR: return ParserRuleContext.EXPRESSION; case ARRAY_TYPE_DESCRIPTOR: return ParserRuleContext.OPEN_BRACKET; 
case ARRAY_LENGTH: return ParserRuleContext.CLOSE_BRACKET; case AT: return ParserRuleContext.ANNOT_REFERENCE; case DOC_STRING: return ParserRuleContext.ANNOTATIONS; case ANNOTATIONS: return ParserRuleContext.AT; case MAPPING_CONSTRUCTOR: return ParserRuleContext.OPEN_BRACE; case VARIABLE_REF: case TYPE_REFERENCE: case ANNOT_REFERENCE: case FIELD_ACCESS_IDENTIFIER: return ParserRuleContext.QUALIFIED_IDENTIFIER; case QUALIFIED_IDENTIFIER: nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.COLON_TOKEN) { return ParserRuleContext.COLON; } case IDENTIFIER: parentCtx = getParentContext(); switch (parentCtx) { case VARIABLE_REF: endContext(); return getNextRuleForExpr(); case TYPE_REFERENCE: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.SEMICOLON; case ANNOT_REFERENCE: endContext(); return ParserRuleContext.ANNOTATION_REF_RHS; case ANNOTATION_DECL: return ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS; case FIELD_ACCESS_IDENTIFIER: endContext(); return ParserRuleContext.EXPRESSION_RHS; default: throw new IllegalStateException(parentCtx.toString()); } case IS_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_EXPRESSION; case NULL_KEYWORD: return ParserRuleContext.EXPRESSION_RHS; case NIL_LITERAL: return ParserRuleContext.OPEN_PARENTHESIS; case LOCAL_TYPE_DEFINITION_STMT: return ParserRuleContext.TYPE_KEYWORD; case RIGHT_ARROW: return ParserRuleContext.EXPRESSION; case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STATEMENT_START_IDENTIFIER: return getNextRuleForDecimalIntegerLiteral(); case EXPRESSION_STATEMENT: return ParserRuleContext.EXPRESSION_STATEMENT_START; case MAP_KEYWORD: case FUTURE_KEYWORD: case LOCK_STMT: return ParserRuleContext.LOCK_KEYWORD; case LOCK_KEYWORD: return ParserRuleContext.BLOCK_STMT; case RECORD_FIELD: return ParserRuleContext.RECORD_FIELD_START; case ANNOTATION_TAG: return ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS; case ANNOTATION_KEYWORD: 
return ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE; case ANNOT_ATTACH_POINTS_LIST: return ParserRuleContext.ATTACH_POINT; case FIELD_IDENT: case FUNCTION_IDENT: case IDENT_AFTER_OBJECT_IDENT: case SINGLE_KEYWORD_ATTACH_POINT_IDENT: case ATTACH_POINT: return ParserRuleContext.ATTACH_POINT_END; case RECORD_FIELD_OR_RECORD_END: return ParserRuleContext.RECORD_BODY_END; case SOURCE_KEYWORD: return ParserRuleContext.ATTACH_POINT_IDENT; case OBJECT_IDENT: return ParserRuleContext.IDENT_AFTER_OBJECT_IDENT; case RECORD_IDENT: return ParserRuleContext.FIELD_IDENT; case RESOURCE_IDENT: return ParserRuleContext.FUNCTION_IDENT; case ANNOTATION_DECL: return ParserRuleContext.ANNOTATION_KEYWORD; case XML_NAMESPACE_DECLARATION: return ParserRuleContext.XMLNS_KEYWORD; case XMLNS_KEYWORD: return ParserRuleContext.CONSTANT_EXPRESSION; case CONSTANT_EXPRESSION: return ParserRuleContext.CONSTANT_EXPRESSION_START; case XML_NAMESPACE_PREFIX_DECL: return ParserRuleContext.SEMICOLON; case NAMED_WORKER_DECL: return ParserRuleContext.WORKER_KEYWORD; case WORKER_KEYWORD: return ParserRuleContext.WORKER_NAME; case WORKER_NAME: return ParserRuleContext.WORKER_NAME_RHS; case FORK_STMT: return ParserRuleContext.FORK_KEYWORD; case SERVICE_CONSTRUCTOR_EXPRESSION: return ParserRuleContext.SERVICE_KEYWORD; default: return getNextRuleInternal(currentCtx, nextLookahead); } } private ParserRuleContext getNextRuleInternal(ParserRuleContext currentCtx, int nextLookahead) { ParserRuleContext parentCtx; switch (currentCtx) { case FORK_KEYWORD: return ParserRuleContext.OPEN_BRACE; case TRAP_KEYWORD: return ParserRuleContext.EXPRESSION; case LIST_CONSTRUCTOR: return ParserRuleContext.OPEN_BRACKET; case FOREACH_STMT: return ParserRuleContext.FOREACH_KEYWORD; case FOREACH_KEYWORD: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN; case IN_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.LET_EXPR_LET_VAR_DECL) { endContext(); } return ParserRuleContext.EXPRESSION; case 
TYPE_CAST: return ParserRuleContext.LT; case PIPE: if (getParentContext() == ParserRuleContext.ALTERNATE_WAIT_EXPRS) { return ParserRuleContext.EXPRESSION; } return ParserRuleContext.TYPE_DESCRIPTOR; case TABLE_CONSTRUCTOR: return ParserRuleContext.OPEN_BRACKET; case TABLE_KEYWORD: if (isInTypeDescContext()) { return ParserRuleContext.ROW_TYPE_PARAM; } return ParserRuleContext.TABLE_KEYWORD_RHS; case KEY_SPECIFIER: return ParserRuleContext.KEY_KEYWORD; case KEY_KEYWORD: if (isInTypeDescContext()) { return ParserRuleContext.KEY_CONSTRAINTS_RHS; } return ParserRuleContext.OPEN_PARENTHESIS; case ERROR_KEYWORD: if (isInTypeDescContext()) { return ParserRuleContext.ERROR_TYPE_PARAM_START; } return ParserRuleContext.ARG_LIST_START; case ERROR_TYPE_PARAM_START: return ParserRuleContext.ERROR_TYPE_PARAMS; case LET_EXPRESSION: return ParserRuleContext.LET_KEYWORD; case LET_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.QUERY_EXPRESSION) { return ParserRuleContext.LET_CLAUSE_LET_VAR_DECL; } else if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) { endContext(); return ParserRuleContext.LET_CLAUSE_LET_VAR_DECL; } return ParserRuleContext.LET_EXPR_LET_VAR_DECL; case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: return ParserRuleContext.LET_VAR_DECL_START; case STREAM_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION) { return ParserRuleContext.QUERY_EXPRESSION; } return ParserRuleContext.LT; case END_OF_TYPE_DESC: return getNextRuleForTypeDescriptor(); case TYPED_BINDING_PATTERN: return ParserRuleContext.TYPE_DESCRIPTOR; case CAPTURE_BINDING_PATTERN: return ParserRuleContext.VARIABLE_NAME; case REST_BINDING_PATTERN: return ParserRuleContext.ELLIPSIS; case LIST_BINDING_PATTERN: return ParserRuleContext.OPEN_BRACKET; case MAPPING_BINDING_PATTERN: return ParserRuleContext.OPEN_BRACE; case FIELD_BINDING_PATTERN: return ParserRuleContext.FIELD_BINDING_PATTERN_NAME; case 
FIELD_BINDING_PATTERN_NAME: return ParserRuleContext.FIELD_BINDING_PATTERN_END; case PARAMETERIZED_TYPE: return ParserRuleContext.LT; case NEW_KEYWORD: return ParserRuleContext.NEW_KEYWORD_RHS; case LT: return getNextRuleForLt(); case GT: return getNextRuleForGt(nextLookahead); case TEMPLATE_END: return ParserRuleContext.EXPRESSION_RHS; case TEMPLATE_START: return ParserRuleContext.TEMPLATE_BODY; case TEMPLATE_BODY: return ParserRuleContext.TEMPLATE_MEMBER; case TEMPLATE_STRING: return ParserRuleContext.TEMPLATE_STRING_RHS; case INTERPOLATION_START_TOKEN: return ParserRuleContext.EXPRESSION; case XML_KEYWORD: case STRING_KEYWORD: case BASE16_KEYWORD: case BASE64_KEYWORD: return ParserRuleContext.TEMPLATE_START; case ARG_LIST_START: return ParserRuleContext.ARG_LIST; case ARG_LIST_END: endContext(); return ParserRuleContext.EXPRESSION_RHS; case ARG_LIST: return ParserRuleContext.ARG_START_OR_ARG_LIST_END; case TYPE_DESC_IN_ANNOTATION_DECL: case TYPE_DESC_BEFORE_IDENTIFIER: case TYPE_DESC_IN_RECORD_FIELD: case TYPE_DESC_IN_PARAM: case TYPE_DESC_IN_TYPE_BINDING_PATTERN: case TYPE_DESC_IN_TYPE_DEF: case TYPE_DESC_IN_ANGLE_BRACKETS: case TYPE_DESC_IN_RETURN_TYPE_DESC: case TYPE_DESC_IN_EXPRESSION: case TYPE_DESC_IN_STREAM_TYPE_DESC: case TYPE_DESC_IN_PARENTHESIS: case TYPE_DESC_IN_NEW_EXPR: case TYPE_DESC_IN_TUPLE: return ParserRuleContext.TYPE_DESCRIPTOR; case VAR_DECL_STARTED_WITH_DENTIFIER: startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); return ParserRuleContext.TYPEDESC_RHS; case INFERRED_TYPE_DESC: return ParserRuleContext.GT; case ROW_TYPE_PARAM: return ParserRuleContext.LT; case PARENTHESISED_TYPE_DESC_START: return ParserRuleContext.TYPE_DESC_IN_PARENTHESIS; case SELECT_CLAUSE: return ParserRuleContext.SELECT_KEYWORD; case SELECT_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.QUERY_EXPRESSION) { endContext(); } if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) { endContext(); endContext(); } return 
ParserRuleContext.EXPRESSION; case WHERE_CLAUSE: return ParserRuleContext.WHERE_KEYWORD; case WHERE_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) { endContext(); } return ParserRuleContext.EXPRESSION; case FROM_CLAUSE: return ParserRuleContext.FROM_KEYWORD; case FROM_KEYWORD: parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) { endContext(); } return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN; case LET_CLAUSE: return ParserRuleContext.LET_KEYWORD; case QUERY_EXPRESSION: return ParserRuleContext.FROM_CLAUSE; case TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION: return ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START; case BITWISE_AND_OPERATOR: return ParserRuleContext.TYPE_DESCRIPTOR; case EXPR_FUNC_BODY_START: return ParserRuleContext.EXPRESSION; case AMBIGUOUS_FUNC_TYPE_DESC_RHS: endContext(); startContext(ParserRuleContext.VAR_DECL_STMT); startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); return ParserRuleContext.TYPEDESC_RHS; case FUNC_TYPE_DESC_END: endContext(); return ParserRuleContext.TYPEDESC_RHS; case IMPLICIT_ANON_FUNC_PARAM: return ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS; case EXPLICIT_ANON_FUNC_EXPR_BODY_START: endContext(); return ParserRuleContext.EXPR_FUNC_BODY_START; case OBJECT_MEMBER: return ParserRuleContext.OBJECT_MEMBER_START; case ANNOTATION_END: return getNextRuleForAnnotationEnd(nextLookahead); case START_KEYWORD: return ParserRuleContext.EXPRESSION; case FLUSH_KEYWORD: return ParserRuleContext.OPTIONAL_PEER_WORKER; case PEER_WORKER_NAME: case DEFAULT_KEYWORD: if (getParentContext() == ParserRuleContext.MULTI_RECEIVE_WORKERS) { return ParserRuleContext.RECEIVE_FIELD_END; } return ParserRuleContext.EXPRESSION_RHS; case PLUS_TOKEN: case MINUS_TOKEN: return ParserRuleContext.SIGNED_INT_OR_FLOAT_RHS; case SIGNED_INT_OR_FLOAT_RHS: return getNextRuleForExpr(); case TUPLE_TYPE_DESC_START: return 
ParserRuleContext.TYPE_DESC_IN_TUPLE; case TYPE_DESC_IN_TUPLE_RHS: return ParserRuleContext.OPEN_BRACKET; case WORKER_NAME_OR_METHOD_NAME: return ParserRuleContext.WORKER_NAME_OR_METHOD_NAME; case DEFAULT_WORKER_NAME_IN_ASYNC_SEND: return ParserRuleContext.SEMICOLON; case SYNC_SEND_TOKEN: return ParserRuleContext.PEER_WORKER_NAME; case LEFT_ARROW_TOKEN: return ParserRuleContext.RECEIVE_WORKERS; case MULTI_RECEIVE_WORKERS: return ParserRuleContext.OPEN_BRACE; case RECEIVE_FIELD_NAME: return ParserRuleContext.COLON; case WAIT_KEYWORD: return ParserRuleContext.WAIT_KEYWORD_RHS; case WAIT_FIELD_NAME: return ParserRuleContext.WAIT_FIELD_NAME_RHS; case ALTERNATE_WAIT_EXPR_LIST_END: return getNextRuleForWaitExprListEnd(); case MULTI_WAIT_FIELDS: return ParserRuleContext.OPEN_BRACE; case ALTERNATE_WAIT_EXPRS: return ParserRuleContext.EXPRESSION; case ANNOT_CHAINING_TOKEN: return ParserRuleContext.FIELD_ACCESS_IDENTIFIER; case DO_CLAUSE: return ParserRuleContext.DO_KEYWORD; case DO_KEYWORD: return ParserRuleContext.OPEN_BRACE; case LET_CLAUSE_END: endContext(); return ParserRuleContext.QUERY_PIPELINE_RHS; case MEMBER_ACCESS_KEY_EXPR: return ParserRuleContext.OPEN_BRACKET; case OPTIONAL_CHAINING_TOKEN: return ParserRuleContext.FIELD_ACCESS_IDENTIFIER; case CONDITIONAL_EXPRESSION: return ParserRuleContext.QUESTION_MARK; case TRANSACTION_STMT: return ParserRuleContext.TRANSACTION_KEYWORD; case RETRY_STMT: return ParserRuleContext.RETRY_KEYWORD; case ROLLBACK_STMT: return ParserRuleContext.ROLLBACK_KEYWORD; case TRANSACTION_KEYWORD: return ParserRuleContext.BLOCK_STMT; case COMMIT_KEYWORD: return ParserRuleContext.EXPRESSION_RHS; case ROLLBACK_KEYWORD: return ParserRuleContext.ROLLBACK_RHS; case RETRY_KEYWORD: return ParserRuleContext.RETRY_KEYWORD_RHS; case TRANSACTIONAL_KEYWORD: return ParserRuleContext.EXPRESSION_RHS; case MODULE_ENUM_DECLARATION: return ParserRuleContext.ENUM_KEYWORD; case ENUM_KEYWORD: return ParserRuleContext.MODULE_ENUM_NAME; case MODULE_ENUM_NAME: 
return ParserRuleContext.OPEN_BRACE; case ENUM_MEMBER_LIST: return ParserRuleContext.ENUM_MEMBER_START; case ENUM_MEMBER_NAME: return ParserRuleContext.ENUM_MEMBER_INTERNAL_RHS; case TYPED_BINDING_PATTERN_TYPE_RHS: return ParserRuleContext.BINDING_PATTERN; case UNION_OR_INTERSECTION_TOKEN: return ParserRuleContext.TYPE_DESCRIPTOR; default: throw new IllegalStateException("cannot find the next rule for: " + currentCtx); } } private void startContextIfRequired(ParserRuleContext currentCtx) { switch (currentCtx) { case COMP_UNIT: case FUNC_DEF_OR_FUNC_TYPE: case ANON_FUNC_EXPRESSION: case FUNC_DEF: case FUNC_TYPE_DESC: case EXTERNAL_FUNC_BODY: case FUNC_BODY_BLOCK: case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: case VAR_DECL_STMT: case ASSIGNMENT_STMT: case REQUIRED_PARAM: case DEFAULTABLE_PARAM: case REST_PARAM: case MODULE_TYPE_DEFINITION: case RECORD_FIELD: case RECORD_TYPE_DESCRIPTOR: case OBJECT_TYPE_DESCRIPTOR: case ARG_LIST: case OBJECT_FUNC_OR_FIELD: case IF_BLOCK: case BLOCK_STMT: case WHILE_BLOCK: case PANIC_STMT: case CALL_STMT: case IMPORT_DECL: case CONTINUE_STATEMENT: case BREAK_STATEMENT: case RETURN_STMT: case COMPUTED_FIELD_NAME: case LISTENERS_LIST: case SERVICE_DECL: case LISTENER_DECL: case CONSTANT_DECL: case NIL_TYPE_DESCRIPTOR: case COMPOUND_ASSIGNMENT_STMT: case OPTIONAL_TYPE_DESCRIPTOR: case ARRAY_TYPE_DESCRIPTOR: case ANNOTATIONS: case VARIABLE_REF: case TYPE_REFERENCE: case ANNOT_REFERENCE: case FIELD_ACCESS_IDENTIFIER: case MAPPING_CONSTRUCTOR: case LOCAL_TYPE_DEFINITION_STMT: case EXPRESSION_STATEMENT: case NIL_LITERAL: case LOCK_STMT: case ANNOTATION_DECL: case ANNOT_ATTACH_POINTS_LIST: case XML_NAMESPACE_DECLARATION: case CONSTANT_EXPRESSION: case NAMED_WORKER_DECL: case FORK_STMT: case FOREACH_STMT: case LIST_CONSTRUCTOR: case TYPE_CAST: case KEY_SPECIFIER: case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: case ROW_TYPE_PARAM: case TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION: case OBJECT_MEMBER: case LIST_BINDING_PATTERN: case 
MAPPING_BINDING_PATTERN: case REST_BINDING_PATTERN: case TYPED_BINDING_PATTERN: case CAPTURE_BINDING_PATTERN: case MULTI_RECEIVE_WORKERS: case MULTI_WAIT_FIELDS: case ALTERNATE_WAIT_EXPRS: case DO_CLAUSE: case MEMBER_ACCESS_KEY_EXPR: case CONDITIONAL_EXPRESSION: case TRANSACTION_STMT: case RETRY_STMT: case ROLLBACK_STMT: case MODULE_ENUM_DECLARATION: case ENUM_MEMBER_LIST: case SERVICE_CONSTRUCTOR_EXPRESSION: case TYPE_DESC_IN_ANNOTATION_DECL: case TYPE_DESC_BEFORE_IDENTIFIER: case TYPE_DESC_IN_RECORD_FIELD: case TYPE_DESC_IN_PARAM: case TYPE_DESC_IN_TYPE_BINDING_PATTERN: case TYPE_DESC_IN_TYPE_DEF: case TYPE_DESC_IN_ANGLE_BRACKETS: case TYPE_DESC_IN_RETURN_TYPE_DESC: case TYPE_DESC_IN_EXPRESSION: case TYPE_DESC_IN_STREAM_TYPE_DESC: case TYPE_DESC_IN_PARENTHESIS: case TYPE_DESC_IN_NEW_EXPR: case TYPE_DESC_IN_TUPLE: startContext(currentCtx); break; default: break; } switch (currentCtx) { case TABLE_CONSTRUCTOR: case QUERY_EXPRESSION: switchContext(currentCtx); break; default: break; } } private ParserRuleContext getNextRuleForCloseParenthsis() { ParserRuleContext parentCtx; parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.PARAM_LIST) { endContext(); return ParserRuleContext.FUNC_OPTIONAL_RETURNS; } else if (isParameter(parentCtx)) { endContext(); endContext(); return ParserRuleContext.FUNC_OPTIONAL_RETURNS; } else if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR) { endContext(); return ParserRuleContext.TYPEDESC_RHS; } else if (parentCtx == ParserRuleContext.NIL_LITERAL) { endContext(); return getNextRuleForExpr(); } else if (parentCtx == ParserRuleContext.KEY_SPECIFIER) { endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS; } else if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } else if (parentCtx == ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS) { endContext(); return ParserRuleContext.INFER_PARAM_END_OR_PARENTHESIS_END; } return 
ParserRuleContext.EXPRESSION_RHS; } private ParserRuleContext getNextRuleForOpenParenthesis() { ParserRuleContext parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.EXPRESSION_STATEMENT) { return ParserRuleContext.EXPRESSION_STATEMENT_START; } else if (isStatement(parentCtx) || isExpressionContext(parentCtx) || parentCtx == ParserRuleContext.ARRAY_TYPE_DESCRIPTOR) { return ParserRuleContext.EXPRESSION; } else if (parentCtx == ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE || parentCtx == ParserRuleContext.FUNC_TYPE_DESC || parentCtx == ParserRuleContext.FUNC_DEF || parentCtx == ParserRuleContext.ANON_FUNC_EXPRESSION) { startContext(ParserRuleContext.PARAM_LIST); return ParserRuleContext.PARAM_LIST; } else if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR || parentCtx == ParserRuleContext.NIL_LITERAL) { return ParserRuleContext.CLOSE_PARENTHESIS; } else if (parentCtx == ParserRuleContext.KEY_SPECIFIER) { return ParserRuleContext.KEY_SPECIFIER_RHS; } else if (isInTypeDescContext()) { startContext(ParserRuleContext.KEY_SPECIFIER); return ParserRuleContext.KEY_SPECIFIER_RHS; } else if (isParameter(parentCtx)) { return ParserRuleContext.EXPRESSION; } return ParserRuleContext.EXPRESSION; } private ParserRuleContext getNextRuleForOpenBrace(int nextLookahead) { ParserRuleContext parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.LISTENERS_LIST) { endContext(); } switch (parentCtx) { case OBJECT_TYPE_DESCRIPTOR: return ParserRuleContext.OBJECT_MEMBER; case RECORD_TYPE_DESCRIPTOR: return ParserRuleContext.RECORD_FIELD; case MAPPING_CONSTRUCTOR: return ParserRuleContext.FIRST_MAPPING_FIELD; case FORK_STMT: return ParserRuleContext.NAMED_WORKER_DECL; case MULTI_RECEIVE_WORKERS: return ParserRuleContext.RECEIVE_FIELD; case MULTI_WAIT_FIELDS: return ParserRuleContext.WAIT_FIELD_NAME; case MODULE_ENUM_DECLARATION: return ParserRuleContext.ENUM_MEMBER_LIST; case MAPPING_BINDING_PATTERN: return ParserRuleContext.MAPPING_BINDING_PATTERN_MEMBER; default: 
return ParserRuleContext.STATEMENT; } } private boolean isExpressionContext(ParserRuleContext ctx) { switch (ctx) { case LISTENERS_LIST: case MAPPING_CONSTRUCTOR: case COMPUTED_FIELD_NAME: case LIST_CONSTRUCTOR: case INTERPOLATION: case ARG_LIST: case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: case TABLE_CONSTRUCTOR: case QUERY_EXPRESSION: case TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION: case SERVICE_CONSTRUCTOR_EXPRESSION: return true; default: return false; } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForParamType() { ParserRuleContext parentCtx; parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.REQUIRED_PARAM || parentCtx == ParserRuleContext.DEFAULTABLE_PARAM) { return ParserRuleContext.VARIABLE_NAME; } else if (parentCtx == ParserRuleContext.REST_PARAM) { return ParserRuleContext.ELLIPSIS; } else { throw new IllegalStateException(); } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForComma() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case PARAM_LIST: case REQUIRED_PARAM: case DEFAULTABLE_PARAM: case REST_PARAM: endContext(); return parentCtx; case ARG_LIST: return ParserRuleContext.ARG_START; case MAPPING_CONSTRUCTOR: return ParserRuleContext.MAPPING_FIELD; case LISTENERS_LIST: case LIST_CONSTRUCTOR: return ParserRuleContext.EXPRESSION; case ANNOT_ATTACH_POINTS_LIST: return ParserRuleContext.ATTACH_POINT; case TABLE_CONSTRUCTOR: return ParserRuleContext.MAPPING_CONSTRUCTOR; case KEY_SPECIFIER: return ParserRuleContext.VARIABLE_NAME; case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: return ParserRuleContext.LET_VAR_DECL_START; case TYPE_DESC_IN_STREAM_TYPE_DESC: return ParserRuleContext.TYPE_DESCRIPTOR; case BRACED_EXPR_OR_ANON_FUNC_PARAMS: return ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM; case 
TYPE_DESC_IN_TUPLE: return ParserRuleContext.TYPE_DESCRIPTOR; case LIST_BINDING_PATTERN: return ParserRuleContext.LIST_BINDING_PATTERN_CONTENTS; case MAPPING_BINDING_PATTERN: return ParserRuleContext.MAPPING_BINDING_PATTERN_MEMBER; case MULTI_RECEIVE_WORKERS: return ParserRuleContext.RECEIVE_FIELD; case MULTI_WAIT_FIELDS: return ParserRuleContext.WAIT_FIELD_NAME; case ENUM_MEMBER_LIST: return ParserRuleContext.ENUM_MEMBER_START; case MEMBER_ACCESS_KEY_EXPR: return ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END; case LIST_BP_OR_TUPLE_TYPE_DESC: return ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_MEMBER; case BRACKETED_LIST: return ParserRuleContext.BRACKETED_LIST_MEMBER; default: throw new IllegalStateException(parentCtx.toString()); } } /** * Get the next parser context to visit after a type descriptor. * * @return Next parser context */ private ParserRuleContext getNextRuleForTypeDescriptor() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case TYPE_DESC_IN_ANNOTATION_DECL: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.ANNOTATION_TAG; case TYPE_DESC_BEFORE_IDENTIFIER: case TYPE_DESC_IN_RECORD_FIELD: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.VARIABLE_NAME; case TYPE_DESC_IN_TYPE_BINDING_PATTERN: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } if (getParentContext() == ParserRuleContext.FOREACH_STMT) { return ParserRuleContext.BINDING_PATTERN; } return ParserRuleContext.VARIABLE_NAME; case TYPE_DESC_IN_PARAM: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.AFTER_PARAMETER_TYPE; case TYPE_DESC_IN_TYPE_DEF: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.SEMICOLON; case TYPE_DESC_IN_ANGLE_BRACKETS: endContext(); if (isInTypeDescContext()) { return 
ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.GT; case TYPE_DESC_IN_RETURN_TYPE_DESC: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } parentCtx = getParentContext(); switch (parentCtx) { case FUNC_TYPE_DESC: endContext(); return ParserRuleContext.TYPEDESC_RHS; case FUNC_DEF_OR_FUNC_TYPE: return ParserRuleContext.FUNC_BODY_OR_TYPE_DESC_RHS; case FUNC_DEF: return ParserRuleContext.FUNC_BODY; case ANON_FUNC_EXPRESSION: return ParserRuleContext.ANON_FUNC_BODY; case NAMED_WORKER_DECL: return ParserRuleContext.BLOCK_STMT; default: throw new IllegalStateException(parentCtx.toString()); } case TYPE_DESC_IN_EXPRESSION: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.EXPRESSION_RHS; case COMP_UNIT: /* * Fact 1: * ------ * FUNC_DEF_OR_FUNC_TYPE is only possible for module level construct or object member * that starts with 'function' keyword. However, until the end of func-signature, * we don't know whether this is a func-def or a function type. * Hence a var-decl-stmt context is not started until this point. * * Fact 2: * ------ * We reach here for END_OF_TYPE_DESC context. That means we are going to end the * func-type-desc. 
*/ startContext(ParserRuleContext.VAR_DECL_STMT); return ParserRuleContext.VARIABLE_NAME; case OBJECT_MEMBER: return ParserRuleContext.VARIABLE_NAME; case ANNOTATION_DECL: return ParserRuleContext.IDENTIFIER; case TYPE_DESC_IN_STREAM_TYPE_DESC: return ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS; case TYPE_DESC_IN_PARENTHESIS: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.CLOSE_PARENTHESIS; case TYPE_DESC_IN_NEW_EXPR: endContext(); if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } return ParserRuleContext.ARG_LIST_START; case TYPE_DESC_IN_TUPLE: case LIST_BP_OR_TUPLE_TYPE_DESC: return ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS; default: return ParserRuleContext.EXPRESSION_RHS; } } private boolean isInTypeDescContext() { switch (getParentContext()) { case TYPE_DESC_IN_ANNOTATION_DECL: case TYPE_DESC_BEFORE_IDENTIFIER: case TYPE_DESC_IN_RECORD_FIELD: case TYPE_DESC_IN_PARAM: case TYPE_DESC_IN_TYPE_BINDING_PATTERN: case TYPE_DESC_IN_TYPE_DEF: case TYPE_DESC_IN_ANGLE_BRACKETS: case TYPE_DESC_IN_RETURN_TYPE_DESC: case TYPE_DESC_IN_EXPRESSION: case TYPE_DESC_IN_STREAM_TYPE_DESC: case TYPE_DESC_IN_PARENTHESIS: case TYPE_DESC_IN_NEW_EXPR: case TYPE_DESC_IN_TUPLE: case LIST_BP_OR_TUPLE_TYPE_DESC: case BRACKETED_LIST: return true; default: return false; } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForEqualOp() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case EXTERNAL_FUNC_BODY: return ParserRuleContext.EXTERNAL_KEYWORD; case REQUIRED_PARAM: case DEFAULTABLE_PARAM: case RECORD_FIELD: case ARG_LIST: case OBJECT_MEMBER: case LISTENER_DECL: case CONSTANT_DECL: case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: case ENUM_MEMBER_LIST: return ParserRuleContext.EXPRESSION; default: if (isStatement(parentCtx)) { return ParserRuleContext.EXPRESSION; } throw new 
IllegalStateException("equal op cannot exist in a " + parentCtx); } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @param nextLookahead Position of the next token to consider, relative to the position of the original error * @return Next parser context */ private ParserRuleContext getNextRuleForCloseBrace(int nextLookahead) { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case FUNC_BODY_BLOCK: endContext(); STToken nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } parentCtx = getParentContext(); switch (parentCtx) { case SERVICE_DECL: return ParserRuleContext.RESOURCE_DEF; case OBJECT_MEMBER: return ParserRuleContext.OBJECT_MEMBER_START; case COMP_UNIT: return ParserRuleContext.TOP_LEVEL_NODE; case FUNC_DEF: case FUNC_DEF_OR_FUNC_TYPE: endContext(); return ParserRuleContext.TOP_LEVEL_NODE; case ANON_FUNC_EXPRESSION: default: endContext(); return ParserRuleContext.EXPRESSION_RHS; } case SERVICE_DECL: endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TOP_LEVEL_NODE; case OBJECT_MEMBER: endContext(); case RECORD_TYPE_DESCRIPTOR: case OBJECT_TYPE_DESCRIPTOR: endContext(); return ParserRuleContext.TYPEDESC_RHS; case BLOCK_STMT: endContext(); parentCtx = getParentContext(); switch (parentCtx) { case LOCK_STMT: case FOREACH_STMT: case WHILE_BLOCK: case RETRY_STMT: endContext(); return ParserRuleContext.STATEMENT; case IF_BLOCK: endContext(); return ParserRuleContext.ELSE_BLOCK; case TRANSACTION_STMT: endContext(); parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.RETRY_STMT) { endContext(); } return ParserRuleContext.STATEMENT; case NAMED_WORKER_DECL: endContext(); parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.FORK_STMT) { nextToken = this.tokenReader.peek(nextLookahead); switch 
(nextToken.kind) { case CLOSE_BRACE_TOKEN: return ParserRuleContext.CLOSE_BRACE; default: return ParserRuleContext.STATEMENT; } } else { return ParserRuleContext.STATEMENT; } default: return ParserRuleContext.STATEMENT; } case MAPPING_CONSTRUCTOR: endContext(); parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.TABLE_CONSTRUCTOR) { return ParserRuleContext.TABLE_ROW_END; } if (parentCtx == ParserRuleContext.ANNOTATIONS) { return ParserRuleContext.ANNOTATION_END; } return getNextRuleForExpr(); case LIST_BP_OR_TUPLE_TYPE_DESC: return ParserRuleContext.BRACKETED_LIST_MEMBER_END; case MAPPING_BINDING_PATTERN: endContext(); return getNextRuleForTypedBindingPattern(); case FORK_STMT: endContext(); return ParserRuleContext.STATEMENT; case INTERPOLATION: endContext(); return ParserRuleContext.TEMPLATE_MEMBER; case MULTI_RECEIVE_WORKERS: case MULTI_WAIT_FIELDS: case SERVICE_CONSTRUCTOR_EXPRESSION: case DO_CLAUSE: endContext(); return ParserRuleContext.EXPRESSION_RHS; case ENUM_MEMBER_LIST: endContext(); endContext(); return ParserRuleContext.TOP_LEVEL_NODE; default: throw new IllegalStateException("found close-brace in: " + parentCtx); } } private ParserRuleContext getNextRuleForAnnotationEnd(int nextLookahead) { ParserRuleContext parentCtx; STToken nextToken; nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.AT_TOKEN) { return ParserRuleContext.AT; } endContext(); parentCtx = getParentContext(); switch (parentCtx) { case COMP_UNIT: return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA; case FUNC_DEF: case FUNC_TYPE_DESC: case FUNC_DEF_OR_FUNC_TYPE: case ANON_FUNC_EXPRESSION: return ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC; case LET_EXPR_LET_VAR_DECL: case LET_CLAUSE_LET_VAR_DECL: return ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN; case RECORD_FIELD: return ParserRuleContext.RECORD_FIELD_WITHOUT_METADATA; case OBJECT_MEMBER: return ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA; case SERVICE_DECL: return 
ParserRuleContext.RESOURCE_DEF; case FUNC_BODY_BLOCK: return ParserRuleContext.STATEMENT_WITHOUT_ANNOTS; case EXTERNAL_FUNC_BODY: return ParserRuleContext.EXTERNAL_KEYWORD; case TYPE_CAST: return ParserRuleContext.TYPE_CAST_PARAM_RHS; case ENUM_MEMBER_LIST: return ParserRuleContext.ENUM_MEMBER_NAME; default: if (isParameter(parentCtx)) { return ParserRuleContext.REQUIRED_PARAM; } return ParserRuleContext.EXPRESSION; } } /** * Get the next parser context to visit after a variable/parameter name. * * @return Next parser context */ /** * Get the next parser context to visit after a {@link ParserRuleContext * * @param nextLookahead Position of the next token to consider, relative to the position of the original error * @return Next parser context */ private ParserRuleContext getNextRuleForSemicolon(int nextLookahead) { STToken nextToken; ParserRuleContext parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.EXTERNAL_FUNC_BODY) { endContext(); endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TOP_LEVEL_NODE; } else if (parentCtx == ParserRuleContext.QUERY_EXPRESSION) { endContext(); return getNextRuleForSemicolon(nextLookahead); } else if (isExpressionContext(parentCtx)) { endContext(); return ParserRuleContext.STATEMENT; } else if (parentCtx == ParserRuleContext.VAR_DECL_STMT) { endContext(); parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.COMP_UNIT) { return ParserRuleContext.TOP_LEVEL_NODE; } return ParserRuleContext.STATEMENT; } else if (isStatement(parentCtx)) { endContext(); return ParserRuleContext.STATEMENT; } else if (parentCtx == ParserRuleContext.RECORD_FIELD) { endContext(); return ParserRuleContext.RECORD_FIELD_OR_RECORD_END; } else if (parentCtx == ParserRuleContext.MODULE_TYPE_DEFINITION || parentCtx == ParserRuleContext.LISTENER_DECL || parentCtx == ParserRuleContext.CONSTANT_DECL || parentCtx == 
ParserRuleContext.ANNOTATION_DECL || parentCtx == ParserRuleContext.XML_NAMESPACE_DECLARATION) { endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TOP_LEVEL_NODE; } else if (parentCtx == ParserRuleContext.OBJECT_MEMBER) { if (isEndOfObjectTypeNode(nextLookahead)) { endContext(); return ParserRuleContext.CLOSE_BRACE; } return ParserRuleContext.OBJECT_MEMBER_START; } else if (parentCtx == ParserRuleContext.IMPORT_DECL) { endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TOP_LEVEL_NODE; } else if (parentCtx == ParserRuleContext.ANNOT_ATTACH_POINTS_LIST) { endContext(); endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TOP_LEVEL_NODE; } else if (parentCtx == ParserRuleContext.FUNC_DEF || parentCtx == ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE) { endContext(); nextToken = this.tokenReader.peek(nextLookahead); if (nextToken.kind == SyntaxKind.EOF_TOKEN) { return ParserRuleContext.EOF; } return ParserRuleContext.TOP_LEVEL_NODE; } else { throw new IllegalStateException(parentCtx.toString()); } } private ParserRuleContext getNextRuleForDot() { ParserRuleContext parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.IMPORT_DECL) { return ParserRuleContext.IMPORT_MODULE_NAME; } return ParserRuleContext.FIELD_ACCESS_IDENTIFIER; } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForQuestionMark() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case OPTIONAL_TYPE_DESCRIPTOR: endContext(); return ParserRuleContext.TYPEDESC_RHS; case CONDITIONAL_EXPRESSION: return ParserRuleContext.EXPRESSION; default: 
return ParserRuleContext.SEMICOLON; } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForOpenBracket() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case ARRAY_TYPE_DESCRIPTOR: return ParserRuleContext.ARRAY_LENGTH; case LIST_CONSTRUCTOR: return ParserRuleContext.LIST_CONSTRUCTOR_FIRST_MEMBER; case TABLE_CONSTRUCTOR: return ParserRuleContext.ROW_LIST_RHS; case LIST_BINDING_PATTERN: return ParserRuleContext.LIST_BINDING_PATTERN_CONTENTS; default: if (isInTypeDescContext()) { return ParserRuleContext.TYPE_DESC_IN_TUPLE; } return ParserRuleContext.EXPRESSION; } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForCloseBracket() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case ARRAY_TYPE_DESCRIPTOR: case TYPE_DESC_IN_TUPLE: endContext(); return ParserRuleContext.TYPEDESC_RHS; case COMPUTED_FIELD_NAME: endContext(); return ParserRuleContext.COLON; case LIST_BINDING_PATTERN: endContext(); return getNextRuleForTypedBindingPattern(); case LIST_CONSTRUCTOR: case TABLE_CONSTRUCTOR: case MEMBER_ACCESS_KEY_EXPR: endContext(); return getNextRuleForExpr(); case LIST_BP_OR_TUPLE_TYPE_DESC: endContext(); parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_DESC) { return ParserRuleContext.BRACKETED_LIST_MEMBER_END; } return ParserRuleContext.LIST_BP_OR_TUPLE_TYPE_DESC_RHS; case BRACKETED_LIST: endContext(); return ParserRuleContext.BRACKETED_LIST_RHS; default: return getNextRuleForExpr(); } } /** * Get the next parser context to visit after a {@link ParserRuleContext * * @return Next parser context */ private ParserRuleContext getNextRuleForDecimalIntegerLiteral() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case CONSTANT_EXPRESSION: endContext(); return 
getNextRuleForConstExpr(); case ARRAY_TYPE_DESCRIPTOR: default: return ParserRuleContext.CLOSE_BRACKET; } } private ParserRuleContext getNextRuleForExpr() { ParserRuleContext parentCtx; parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.CONSTANT_EXPRESSION) { endContext(); return getNextRuleForConstExpr(); } return ParserRuleContext.EXPRESSION_RHS; } private ParserRuleContext getNextRuleForConstExpr() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case XML_NAMESPACE_DECLARATION: return ParserRuleContext.XML_NAMESPACE_PREFIX_DECL; default: if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } throw new IllegalStateException(parentCtx.toString()); } } private ParserRuleContext getNextRuleForLt() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case TYPE_CAST: return ParserRuleContext.TYPE_CAST_PARAM; default: return ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS; } } private ParserRuleContext getNextRuleForGt(int nextLookahead) { ParserRuleContext parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC) { endContext(); return ParserRuleContext.TYPEDESC_RHS; } if (isInTypeDescContext()) { return ParserRuleContext.TYPEDESC_RHS; } if (parentCtx == ParserRuleContext.ROW_TYPE_PARAM) { endContext(); return ParserRuleContext.TABLE_TYPE_DESC_RHS; } else if (parentCtx == ParserRuleContext.RETRY_STMT) { return ParserRuleContext.RETRY_TYPE_PARAM_RHS; } endContext(); return ParserRuleContext.EXPRESSION; } /** * Get the next parser context to visit after a typed-binding-pattern. 
* * @return Next parser context */ private ParserRuleContext getNextRuleForTypedBindingPattern() { ParserRuleContext parentCtx = getParentContext(); switch (parentCtx) { case CAPTURE_BINDING_PATTERN: case TYPED_BINDING_PATTERN: endContext(); return getNextRuleForTypedBindingPattern(); case FOREACH_STMT: return ParserRuleContext.IN_KEYWORD; case LIST_BINDING_PATTERN: case LIST_BP_OR_TUPLE_TYPE_DESC: case BRACKETED_LIST: return ParserRuleContext.LIST_BINDING_PATTERN_END_OR_CONTINUE; case MAPPING_BINDING_PATTERN: return ParserRuleContext.MAPPING_BINDING_PATTERN_END; case REST_BINDING_PATTERN: endContext(); parentCtx = getParentContext(); if (parentCtx == ParserRuleContext.LIST_BINDING_PATTERN) { return ParserRuleContext.CLOSE_BRACKET; } return ParserRuleContext.CLOSE_BRACE; case ASSIGNMENT_OR_VAR_DECL_STMT: case VAR_DECL_STMT: case AMBIGUOUS_STMT: return ParserRuleContext.VAR_DECL_STMT_RHS; case LET_CLAUSE_LET_VAR_DECL: case LET_EXPR_LET_VAR_DECL: return ParserRuleContext.ASSIGN_OP; default: throw new IllegalStateException(parentCtx.toString()); } } private ParserRuleContext getNextRuleForWaitExprListEnd() { endContext(); return ParserRuleContext.EXPRESSION_RHS; } /** * Check whether the given context is a statement. * * @param ctx Parser context to check * @return <code>true</code> if the given context is a statement. 
<code>false</code> otherwise */ private boolean isStatement(ParserRuleContext parentCtx) { switch (parentCtx) { case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: case VAR_DECL_STMT: case ASSIGNMENT_STMT: case ASSIGNMENT_OR_VAR_DECL_STMT: case IF_BLOCK: case BLOCK_STMT: case WHILE_BLOCK: case CALL_STMT: case PANIC_STMT: case CONTINUE_STATEMENT: case BREAK_STATEMENT: case RETURN_STMT: case COMPOUND_ASSIGNMENT_STMT: case LOCAL_TYPE_DEFINITION_STMT: case EXPRESSION_STATEMENT: case LOCK_STMT: case FORK_STMT: case FOREACH_STMT: case TRANSACTION_STMT: case RETRY_STMT: case ROLLBACK_STMT: case AMBIGUOUS_STMT: return true; default: return false; } } /** * Check whether the given token refers to a binary operator. * * @param token Token to check * @return <code>true</code> if the given token refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(STToken token) { switch (token.kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: case DOUBLE_LT_TOKEN: case DOUBLE_GT_TOKEN: case TRIPPLE_GT_TOKEN: case ELLIPSIS_TOKEN: case DOUBLE_DOT_LT_TOKEN: case ELVIS_TOKEN: return true; case RIGHT_ARROW_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: return true; default: return false; } } private boolean isParameter(ParserRuleContext ctx) { switch (ctx) { case REQUIRED_PARAM: case DEFAULTABLE_PARAM: case REST_PARAM: case PARAM_LIST: return true; default: return false; } } /** * Get the expected token kind at the given parser rule context. If the parser rule is a terminal, * then the corresponding terminal token kind is returned. 
If the parser rule is a production, * then {@link SyntaxKind * * @param ctx Parser rule context * @return Token kind expected at the given parser rule */ @Override protected SyntaxKind getExpectedTokenKind(ParserRuleContext ctx) { switch (ctx) { case ASSIGN_OP: return SyntaxKind.EQUAL_TOKEN; case BINARY_OPERATOR: return SyntaxKind.PLUS_TOKEN; case CLOSE_BRACE: return SyntaxKind.CLOSE_BRACE_TOKEN; case CLOSE_PARENTHESIS: case ARG_LIST_END: return SyntaxKind.CLOSE_PAREN_TOKEN; case COMMA: return SyntaxKind.COMMA_TOKEN; case EXTERNAL_KEYWORD: return SyntaxKind.EXTERNAL_KEYWORD; case FUNCTION_KEYWORD: return SyntaxKind.FUNCTION_KEYWORD; case FUNC_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case OPEN_BRACE: return SyntaxKind.OPEN_BRACE_TOKEN; case OPEN_PARENTHESIS: case ARG_LIST_START: return SyntaxKind.OPEN_PAREN_TOKEN; case RETURNS_KEYWORD: return SyntaxKind.RETURNS_KEYWORD; case SEMICOLON: return SyntaxKind.SEMICOLON_TOKEN; case VARIABLE_NAME: case STATEMENT_START_IDENTIFIER: return SyntaxKind.IDENTIFIER_TOKEN; case PUBLIC_KEYWORD: return SyntaxKind.PUBLIC_KEYWORD; case ASSIGNMENT_STMT: return SyntaxKind.IDENTIFIER_TOKEN; case EXPRESSION_RHS: return SyntaxKind.PLUS_TOKEN; case EXPRESSION: case TERMINAL_EXPRESSION: return SyntaxKind.IDENTIFIER_TOKEN; case EXTERNAL_FUNC_BODY: return SyntaxKind.EQUAL_TOKEN; case FUNC_BODY_OR_TYPE_DESC_RHS: case FUNC_BODY_BLOCK: return SyntaxKind.OPEN_BRACE_TOKEN; case FUNC_DEF: case FUNC_DEF_OR_FUNC_TYPE: case FUNC_TYPE_DESC: return SyntaxKind.FUNCTION_KEYWORD; case VAR_DECL_STMT_RHS: return SyntaxKind.SEMICOLON_TOKEN; case SIMPLE_TYPE_DESCRIPTOR: case REQUIRED_PARAM: case VAR_DECL_STMT: case ASSIGNMENT_OR_VAR_DECL_STMT: case DEFAULTABLE_PARAM: case REST_PARAM: return SyntaxKind.TYPE_DESC; case ASTERISK: case INFERRED_TYPE_DESC: return SyntaxKind.ASTERISK_TOKEN; case CLOSED_RECORD_BODY_END: return SyntaxKind.CLOSE_BRACE_PIPE_TOKEN; case CLOSED_RECORD_BODY_START: return SyntaxKind.OPEN_BRACE_PIPE_TOKEN; case ELLIPSIS: return 
SyntaxKind.ELLIPSIS_TOKEN; case QUESTION_MARK: return SyntaxKind.QUESTION_MARK_TOKEN; case RECORD_BODY_START: return SyntaxKind.OPEN_BRACE_PIPE_TOKEN; case RECORD_FIELD: case RECORD_KEYWORD: return SyntaxKind.RECORD_KEYWORD; case TYPE_KEYWORD: return SyntaxKind.TYPE_KEYWORD; case TYPE_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case TYPE_REFERENCE: case FIELD_ACCESS_IDENTIFIER: return SyntaxKind.IDENTIFIER_TOKEN; case RECORD_BODY_END: return SyntaxKind.CLOSE_BRACE_TOKEN; case OBJECT_KEYWORD: return SyntaxKind.OBJECT_KEYWORD; case PRIVATE_KEYWORD: return SyntaxKind.PRIVATE_KEYWORD; case REMOTE_KEYWORD: return SyntaxKind.REMOTE_KEYWORD; case OBJECT_FIELD_RHS: return SyntaxKind.SEMICOLON_TOKEN; case ABSTRACT_KEYWORD: return SyntaxKind.ABSTRACT_KEYWORD; case CLIENT_KEYWORD: return SyntaxKind.CLIENT_KEYWORD; case OBJECT_TYPE_FIRST_QUALIFIER: case OBJECT_TYPE_SECOND_QUALIFIER: return SyntaxKind.OBJECT_KEYWORD; case CLOSE_BRACKET: case MEMBER_ACCESS_KEY_EXPR_END: return SyntaxKind.CLOSE_BRACKET_TOKEN; case DOT: return SyntaxKind.DOT_TOKEN; case OPEN_BRACKET: case TUPLE_TYPE_DESC_START: return SyntaxKind.OPEN_BRACKET_TOKEN; case IF_KEYWORD: return SyntaxKind.IF_KEYWORD; case ELSE_KEYWORD: return SyntaxKind.ELSE_KEYWORD; case WHILE_KEYWORD: return SyntaxKind.WHILE_KEYWORD; case CHECKING_KEYWORD: return SyntaxKind.CHECK_KEYWORD; case AS_KEYWORD: return SyntaxKind.AS_KEYWORD; case BOOLEAN_LITERAL: return SyntaxKind.TRUE_KEYWORD; case IMPORT_KEYWORD: return SyntaxKind.IMPORT_KEYWORD; case IMPORT_MODULE_NAME: case IMPORT_ORG_OR_MODULE_NAME: case IMPORT_PREFIX: case VARIABLE_REF: case BASIC_LITERAL: case SERVICE_NAME: case IDENTIFIER: case QUALIFIED_IDENTIFIER: case NAMESPACE_PREFIX: case IMPLICIT_ANON_FUNC_PARAM: case WORKER_NAME_OR_METHOD_NAME: case PEER_WORKER_NAME: case RECEIVE_FIELD_NAME: case WAIT_FIELD_NAME: case FIELD_BINDING_PATTERN_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case VERSION_NUMBER: case MAJOR_VERSION: case MINOR_VERSION: case PATCH_VERSION: return 
SyntaxKind.DECIMAL_INTEGER_LITERAL; case SLASH: return SyntaxKind.SLASH_TOKEN; case VERSION_KEYWORD: return SyntaxKind.VERSION_KEYWORD; case IMPORT_DECL_RHS: return SyntaxKind.SEMICOLON_TOKEN; case IMPORT_SUB_VERSION: return SyntaxKind.SEMICOLON_TOKEN; case COLON: return SyntaxKind.COLON_TOKEN; case MAPPING_FIELD_NAME: case MAPPING_FIELD: return SyntaxKind.IDENTIFIER_TOKEN; case PANIC_KEYWORD: return SyntaxKind.PANIC_KEYWORD; case STRING_LITERAL: return SyntaxKind.STRING_LITERAL; case ON_KEYWORD: return SyntaxKind.ON_KEYWORD; case RESOURCE_KEYWORD: return SyntaxKind.RESOURCE_KEYWORD; case RETURN_KEYWORD: return SyntaxKind.RETURN_KEYWORD; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_KEYWORD; case BREAK_KEYWORD: return SyntaxKind.BREAK_KEYWORD; case LISTENER_KEYWORD: return SyntaxKind.CONST_KEYWORD; case CONTINUE_KEYWORD: return SyntaxKind.CONTINUE_KEYWORD; case CONST_KEYWORD: return SyntaxKind.CONST_KEYWORD; case FINAL_KEYWORD: return SyntaxKind.FINAL_KEYWORD; case CONST_DECL_TYPE: return SyntaxKind.IDENTIFIER_TOKEN; case NIL_TYPE_DESCRIPTOR: return SyntaxKind.NIL_TYPE_DESC; case TYPEOF_KEYWORD: return SyntaxKind.TYPEOF_KEYWORD; case OPTIONAL_TYPE_DESCRIPTOR: return SyntaxKind.OPTIONAL_TYPE_DESC; case UNARY_OPERATOR: return SyntaxKind.PLUS_TOKEN; case ARRAY_TYPE_DESCRIPTOR: return SyntaxKind.ARRAY_TYPE_DESC; case AT: return SyntaxKind.AT_TOKEN; case FIELD_DESCRIPTOR_RHS: return SyntaxKind.SEMICOLON_TOKEN; case AFTER_PARAMETER_TYPE: return SyntaxKind.IDENTIFIER_TOKEN; case CONST_DECL_RHS: return SyntaxKind.EQUAL_TOKEN; case IS_KEYWORD: return SyntaxKind.IS_KEYWORD; case OBJECT_MEMBER_WITHOUT_METADATA: case RECORD_FIELD_WITHOUT_METADATA: case PARAMETER_WITHOUT_ANNOTS: case TYPE_DESCRIPTOR: return SyntaxKind.TYPE_DESC; case TYPEOF_EXPRESSION: return SyntaxKind.TYPEOF_KEYWORD; case RIGHT_ARROW: return SyntaxKind.RIGHT_ARROW_TOKEN; case STMT_START_WITH_EXPR_RHS: return SyntaxKind.EQUAL_TOKEN; case COMPOUND_BINARY_OPERATOR: return SyntaxKind.PLUS_TOKEN; case 
UNARY_EXPRESSION: return SyntaxKind.PLUS_TOKEN; case MAP_KEYWORD: return SyntaxKind.MAP_KEYWORD; case FUTURE_KEYWORD: return SyntaxKind.FUTURE_KEYWORD; case TYPEDESC_KEYWORD: return SyntaxKind.TYPEDESC_KEYWORD; case GT: return SyntaxKind.GT_TOKEN; case LT: return SyntaxKind.LT_TOKEN; case NULL_KEYWORD: return SyntaxKind.NULL_KEYWORD; case LOCK_KEYWORD: return SyntaxKind.LOCK_KEYWORD; case ANNOTATION_KEYWORD: return SyntaxKind.ANNOTATION_KEYWORD; case ANNOT_DECL_OPTIONAL_TYPE: return SyntaxKind.IDENTIFIER_TOKEN; case ANNOT_DECL_RHS: return SyntaxKind.ON_KEYWORD; case ARRAY_LENGTH: return SyntaxKind.DECIMAL_INTEGER_LITERAL; case ATTACH_POINT_IDENT: case IDENT_AFTER_OBJECT_IDENT: case SINGLE_KEYWORD_ATTACH_POINT_IDENT: return SyntaxKind.TYPE_KEYWORD; case FIELD_IDENT: return SyntaxKind.FIELD_KEYWORD; case FUNCTION_IDENT: return SyntaxKind.FUNCTION_KEYWORD; case HEX_INTEGER_LITERAL: return SyntaxKind.HEX_INTEGER_LITERAL; case RECORD_FIELD_OR_RECORD_END: return SyntaxKind.CLOSE_BRACE_TOKEN; case SOURCE_KEYWORD: return SyntaxKind.SOURCE_KEYWORD; case ATTACH_POINT_END: return SyntaxKind.SEMICOLON_TOKEN; case CONSTANT_EXPRESSION: return SyntaxKind.STRING_LITERAL; case CONSTANT_EXPRESSION_START: case OBJECT_IDENT: return SyntaxKind.OBJECT_KEYWORD; case RECORD_IDENT: return SyntaxKind.RECORD_KEYWORD; case RESOURCE_IDENT: return SyntaxKind.RESOURCE_KEYWORD; case XMLNS_KEYWORD: case XML_NAMESPACE_DECLARATION: return SyntaxKind.XMLNS_KEYWORD; case XML_NAMESPACE_PREFIX_DECL: return SyntaxKind.SEMICOLON_TOKEN; case NAMED_WORKER_DECL: case WORKER_KEYWORD: return SyntaxKind.WORKER_KEYWORD; case WORKER_NAME: case NAMED_WORKERS: case ANNOTATION_TAG: return SyntaxKind.IDENTIFIER_TOKEN; case NIL_LITERAL: return SyntaxKind.OPEN_PAREN_TOKEN; case FORK_KEYWORD: return SyntaxKind.FORK_KEYWORD; case DECIMAL_FLOATING_POINT_LITERAL: return SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL; case HEX_FLOATING_POINT_LITERAL: return SyntaxKind.HEX_FLOATING_POINT_LITERAL; case PARAMETERIZED_TYPE: return 
SyntaxKind.MAP_KEYWORD; case TRAP_KEYWORD: return SyntaxKind.TRAP_KEYWORD; case FOREACH_KEYWORD: return SyntaxKind.FOREACH_KEYWORD; case IN_KEYWORD: return SyntaxKind.IN_KEYWORD; case PIPE: case UNION_OR_INTERSECTION_TOKEN: return SyntaxKind.PIPE_TOKEN; case TABLE_KEYWORD: return SyntaxKind.TABLE_KEYWORD; case KEY_KEYWORD: return SyntaxKind.KEY_KEYWORD; case ERROR_KEYWORD: return SyntaxKind.ERROR_KEYWORD; case STREAM_KEYWORD: return SyntaxKind.STREAM_KEYWORD; case LET_KEYWORD: return SyntaxKind.LET_KEYWORD; case TEMPLATE_END: case TEMPLATE_START: return SyntaxKind.BACKTICK_TOKEN; case LT_TOKEN: return SyntaxKind.LT_TOKEN; case GT_TOKEN: return SyntaxKind.GT_TOKEN; case INTERPOLATION_START_TOKEN: return SyntaxKind.INTERPOLATION_START_TOKEN; case XML_KEYWORD: return SyntaxKind.XML_KEYWORD; case XML_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case STRING_KEYWORD: return SyntaxKind.STRING_KEYWORD; case BASE16_KEYWORD: return SyntaxKind.BASE16_KEYWORD; case BASE64_KEYWORD: return SyntaxKind.BASE64_KEYWORD; case SELECT_KEYWORD: return SyntaxKind.SELECT_KEYWORD; case WHERE_KEYWORD: return SyntaxKind.WHERE_KEYWORD; case FROM_KEYWORD: return SyntaxKind.FROM_KEYWORD; case EXPR_FUNC_BODY_START: return SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN; case STATEMENT: case STATEMENT_WITHOUT_ANNOTS: return SyntaxKind.CLOSE_BRACE_TOKEN; case START_KEYWORD: return SyntaxKind.START_KEYWORD; case FLUSH_KEYWORD: return SyntaxKind.FLUSH_KEYWORD; case DEFAULT_KEYWORD: case OPTIONAL_PEER_WORKER: case DEFAULT_WORKER_NAME_IN_ASYNC_SEND: return SyntaxKind.DEFAULT_KEYWORD; case DECIMAL_INTEGER_LITERAL: case SIGNED_INT_OR_FLOAT_RHS: return SyntaxKind.DECIMAL_INTEGER_LITERAL; case SYNC_SEND_TOKEN: return SyntaxKind.SYNC_SEND_TOKEN; case WAIT_KEYWORD: return SyntaxKind.WAIT_KEYWORD; case ANNOT_CHAINING_TOKEN: return SyntaxKind.ANNOT_CHAINING_TOKEN; case OPTIONAL_CHAINING_TOKEN: return SyntaxKind.OPTIONAL_CHAINING_TOKEN; case TRANSACTION_KEYWORD: return SyntaxKind.TRANSACTION_KEYWORD; case COMMIT_KEYWORD: 
return SyntaxKind.COMMIT_KEYWORD; case RETRY_KEYWORD: return SyntaxKind.RETRY_KEYWORD; case ROLLBACK_KEYWORD: return SyntaxKind.ROLLBACK_KEYWORD; case ENUM_KEYWORD: return SyntaxKind.ENUM_KEYWORD; case MODULE_ENUM_NAME: case ENUM_MEMBER_NAME: return SyntaxKind.IDENTIFIER_TOKEN; case ENUM_MEMBER_INTERNAL_RHS: case ENUM_MEMBER_RHS: return SyntaxKind.CLOSE_BRACE_TOKEN; case TYPED_BINDING_PATTERN_TYPE_RHS: return SyntaxKind.IDENTIFIER_TOKEN; default: break; } return SyntaxKind.NONE; } /** * Check whether a token kind is a basic literal. * * @param kind Token kind to check * @return <code>true</code> if the given token kind belongs to a basic literal.<code>false</code> otherwise */ private boolean isBasicLiteral(SyntaxKind kind) { switch (kind) { case DECIMAL_INTEGER_LITERAL: case HEX_INTEGER_LITERAL: case STRING_LITERAL: case TRUE_KEYWORD: case FALSE_KEYWORD: case NULL_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL: case HEX_FLOATING_POINT_LITERAL: return true; default: return false; } } /** * Check whether the given token refers to a unary operator. * * @param token Token to check * @return <code>true</code> if the given token refers to a unary operator. <code>false</code> otherwise */ private boolean isUnaryOperator(STToken token) { switch (token.kind) { case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return true; default: return false; } } private boolean isSingleKeywordAttachPointIdent(SyntaxKind tokenKind) { switch (tokenKind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: return true; default: return false; } } /** * Check whether the given token is a parameterized type keyword. * * @param tokenKind Token to check * @return <code>true</code> if the given token is a parameterized type keyword. 
<code>false</code> otherwise */ public boolean isParameterizedTypeToken(SyntaxKind tokenKind) { switch (tokenKind) { case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: return true; default: return false; } } }
Add a check that the jar file exists before creating the class loader.
/**
 * Creates an isolated {@link URLClassLoader} over the single local jar at {@code jarPath}.
 *
 * @param jarPath filesystem path to the jar
 * @param parent  parent class loader for delegation
 * @return a new class loader whose classpath is exactly the given jar
 * @throws FileNotFoundException if no file exists at {@code jarPath}
 * @throws MalformedURLException if the path cannot be converted to a URL
 */
public static URLClassLoader getClassLoader(String jarPath, ClassLoader parent)
        throws MalformedURLException, FileNotFoundException {
    // Fail fast with a clear error instead of a late ClassNotFoundException
    // when the jar is missing (review request: check whether this jar exists).
    File jarFile = new File(jarPath);
    if (!jarFile.exists()) {
        throw new FileNotFoundException("Can not find local file: " + jarPath);
    }
    // File.toURI().toURL() yields a well-formed file: URL even when jarPath is a
    // bare filesystem path, which "new URL(jarPath)" would reject.
    URL url = jarFile.toURI().toURL();
    return URLClassLoader.newInstance(new URL[] {url}, parent);
}
URL url = new URL(jarPath);
/**
 * Builds a class loader that loads classes from a single local jar, verifying
 * up front that the jar actually exists on disk.
 *
 * @param jarPath path of the local jar file
 * @param parent  parent loader used for delegation
 * @return a fresh {@link URLClassLoader} backed by the jar
 * @throws FileNotFoundException when the path does not point to an existing file
 * @throws MalformedURLException when the file URL cannot be formed
 */
public static URLClassLoader getClassLoader(String jarPath, ClassLoader parent)
        throws MalformedURLException, FileNotFoundException {
    File localJar = new File(jarPath);
    if (!localJar.exists()) {
        throw new FileNotFoundException("Can not find local file: " + jarPath);
    }
    return URLClassLoader.newInstance(new URL[] {localJar.toURI().toURL()}, parent);
}
class || c == Character.class) { return Sets.newHashSet(JavaUdfDataType.CHAR); }
class || c == Character.class) { return Sets.newHashSet(JavaUdfDataType.CHAR); }
nit: shorter branch first is slightly easier to read
/**
 * Returns the schema of the JSON payload.
 *
 * <p>When the message schema is nested, the payload schema is simply the row
 * schema of the dedicated {@code payload} field. When the schema is flat, every
 * top-level field except the event timestamp is part of the payload.
 */
private Schema payloadSchema() {
    // Shorter branch first (review nit): nested schema is the trivial case.
    if (!useFlatSchema()) {
        return messageSchema().getField(PAYLOAD_FIELD).getType().getRowSchema();
    }
    // Flat schema: copy all fields except the synthetic timestamp field.
    Schema.Builder builder = Schema.builder();
    for (Schema.Field field : messageSchema().getFields()) {
        if (!field.getName().equals(TIMESTAMP_FIELD)) {
            builder.addField(field);
        }
    }
    return builder.build();
}
continue;
/**
 * Derives the payload schema from the message schema: for a flat schema it is
 * every field but the event timestamp; for a nested schema it is the row schema
 * of the {@code payload} field.
 */
private Schema payloadSchema() {
    if (useFlatSchema()) {
        // All top-level fields except the timestamp belong to the payload.
        return new Schema(
            messageSchema().getFields().stream()
                .filter(field -> !TIMESTAMP_FIELD.equals(field.getName()))
                .collect(Collectors.toList()));
    }
    return messageSchema().getField(PAYLOAD_FIELD).getType().getRowSchema();
}
class PubsubMessageToRow extends DoFn<PubsubMessage, Row> {
    // Names of the three required top-level fields of the message schema.
    static final String TIMESTAMP_FIELD = "event_timestamp";
    static final String ATTRIBUTES_FIELD = "attributes";
    static final String PAYLOAD_FIELD = "payload";
    // Tag for messages that failed JSON parsing (dead-letter queue output).
    static final TupleTag<PubsubMessage> DLQ_TAG = new TupleTag<PubsubMessage>() {};
    // Tag for successfully converted rows.
    static final TupleTag<Row> MAIN_TAG = new TupleTag<Row>() {};
    // Lazily initialized per-worker JSON mapper; transient because DoFns are
    // serialized, volatile for safe lazy publication across threads.
    private transient volatile @Nullable ObjectMapper objectMapper;

    /**
     * Schema of the Pubsub message.
     *
     * <p>Required to have exactly 3 top level fields at the moment:
     *
     * <ul>
     *   <li>'event_timestamp' (the element timestamp)
     *   <li>'attributes' (a map of the Pubsub message attributes)
     *   <li>'payload' (a row parsed from the message's JSON payload)
     * </ul>
     *
     * <p>Only UTF-8 JSON objects are supported.
     */
    public abstract Schema messageSchema();

    // Whether unparseable messages go to the DLQ output instead of failing the bundle.
    public abstract boolean useDlq();

    // Whether the payload fields are flattened into the top-level schema.
    public abstract boolean useFlatSchema();

    public static Builder builder() {
        return new AutoValue_PubsubMessageToRow.Builder();
    }

    @DoFn.ProcessElement
    public void processElement(ProcessContext context) {
        try {
            List<Object> values = getFieldValues(context);
            context.output(Row.withSchema(messageSchema()).addValues(values).build());
        } catch (UnsupportedRowJsonException jsonException) {
            // Malformed payload: route to DLQ when configured, otherwise fail loudly.
            if (useDlq()) {
                context.output(DLQ_TAG, context.element());
            } else {
                throw new RuntimeException("Error parsing message", jsonException);
            }
        }
    }

    /**
     * Get values for fields in the same order they're specified in schema, including timestamp,
     * payload, and attributes.
     */
    private List<Object> getFieldValues(ProcessContext context) {
        Row payload = parsePayloadJsonRow(context.element());
        return messageSchema().getFields().stream()
            .map(
                field ->
                    getValueForField(
                        field, context.timestamp(), context.element().getAttributeMap(), payload))
            .collect(toList());
    }

    // Maps one schema field to its value: the element timestamp, the attribute
    // map, a payload column (flat schema), or the whole payload row (nested).
    private Object getValueForField(
        Schema.Field field, Instant timestamp, Map<String, String> attributeMap, Row payload) {
        if (useFlatSchema()) {
            if (field.getName().equals(TIMESTAMP_FIELD)) {
                return timestamp;
            } else {
                // Every non-timestamp field is looked up in the parsed payload row.
                return payload.getValue(field.getName());
            }
        } else {
            switch (field.getName()) {
                case TIMESTAMP_FIELD:
                    return timestamp;
                case ATTRIBUTES_FIELD:
                    return attributeMap;
                case PAYLOAD_FIELD:
                    return payload;
                default:
                    throw new IllegalArgumentException(
                        "Unexpected field '"
                            + field.getName()
                            + "' in top level schema"
                            + " for Pubsub message. Top level schema should only contain "
                            + "'timestamp', 'attributes', and 'payload' fields");
            }
        }
    }

    // Parses the UTF-8 JSON payload into a Row using a lazily-created mapper.
    // NOTE: payloadSchema() is defined elsewhere in this class (elided here).
    private Row parsePayloadJsonRow(PubsubMessage pubsubMessage) {
        String payloadJson = new String(pubsubMessage.getPayload(), StandardCharsets.UTF_8);
        if (objectMapper == null) {
            objectMapper = newObjectMapperWith(RowJsonDeserializer.forSchema(payloadSchema()));
        }
        return RowJsonUtils.jsonToRow(objectMapper, payloadJson);
    }

    /** AutoValue builder for configuring the conversion. */
    @AutoValue.Builder
    abstract static class Builder {
        public abstract Builder messageSchema(Schema messageSchema);

        public abstract Builder useDlq(boolean useDlq);

        public abstract Builder useFlatSchema(boolean useFlatSchema);

        public abstract PubsubMessageToRow build();
    }
}
class PubsubMessageToRow extends DoFn<PubsubMessage, Row> {
    // Names of the special top-level fields of the message schema.
    static final String TIMESTAMP_FIELD = "event_timestamp";
    static final String ATTRIBUTES_FIELD = "attributes";
    static final String PAYLOAD_FIELD = "payload";
    // Tag for messages that failed JSON parsing (dead-letter queue output).
    static final TupleTag<PubsubMessage> DLQ_TAG = new TupleTag<PubsubMessage>() {};
    // Tag for successfully converted rows.
    static final TupleTag<Row> MAIN_TAG = new TupleTag<Row>() {};
    // Lazily initialized per-worker JSON mapper; transient because DoFns are
    // serialized, volatile for safe lazy publication across threads.
    private transient volatile @Nullable ObjectMapper objectMapper;

    /**
     * Schema of the Pubsub message.
     *
     * <p>Required to have at least an 'event_timestamp' field.
     *
     * <p>If {@code useFlatSchema()} is set, every other field is assumed to be part of the payload.
     * Otherwise, the schema must contain exactly:
     *
     * <ul>
     *   <li>'attributes' (a map of the Pubsub message attributes)
     *   <li>'payload' (a row parsed from the message's JSON payload)
     * </ul>
     *
     * <p>Only UTF-8 JSON objects are supported.
     */
    public abstract Schema messageSchema();

    // Whether unparseable messages go to the DLQ output instead of failing the bundle.
    public abstract boolean useDlq();

    // Whether the payload fields are flattened into the top-level schema.
    public abstract boolean useFlatSchema();

    public static Builder builder() {
        return new AutoValue_PubsubMessageToRow.Builder();
    }

    @DoFn.ProcessElement
    public void processElement(ProcessContext context) {
        try {
            List<Object> values = getFieldValues(context);
            context.output(Row.withSchema(messageSchema()).addValues(values).build());
        } catch (UnsupportedRowJsonException jsonException) {
            // Malformed payload: route to DLQ when configured, otherwise fail loudly.
            if (useDlq()) {
                context.output(DLQ_TAG, context.element());
            } else {
                throw new RuntimeException("Error parsing message", jsonException);
            }
        }
    }

    /**
     * Get values for fields in the same order they're specified in schema, including timestamp,
     * payload, and attributes.
     */
    private List<Object> getFieldValues(ProcessContext context) {
        Row payload = parsePayloadJsonRow(context.element());
        return messageSchema().getFields().stream()
            .map(
                field ->
                    getValueForField(
                        field, context.timestamp(), context.element().getAttributeMap(), payload))
            .collect(toList());
    }

    // Maps one schema field to its value: the element timestamp, the attribute
    // map, a payload column (flat schema), or the whole payload row (nested).
    private Object getValueForField(
        Schema.Field field, Instant timestamp, Map<String, String> attributeMap, Row payload) {
        if (useFlatSchema()) {
            if (field.getName().equals(TIMESTAMP_FIELD)) {
                return timestamp;
            } else {
                // Every non-timestamp field is looked up in the parsed payload row.
                return payload.getValue(field.getName());
            }
        } else {
            switch (field.getName()) {
                case TIMESTAMP_FIELD:
                    return timestamp;
                case ATTRIBUTES_FIELD:
                    return attributeMap;
                case PAYLOAD_FIELD:
                    return payload;
                default:
                    throw new IllegalArgumentException(
                        "Unexpected field '"
                            + field.getName()
                            + "' in top level schema"
                            + " for Pubsub message. Top level schema should only contain "
                            + "'timestamp', 'attributes', and 'payload' fields");
            }
        }
    }

    // Parses the UTF-8 JSON payload into a Row using a lazily-created mapper.
    // NOTE: payloadSchema() is defined elsewhere in this class (elided here).
    private Row parsePayloadJsonRow(PubsubMessage pubsubMessage) {
        String payloadJson = new String(pubsubMessage.getPayload(), StandardCharsets.UTF_8);
        if (objectMapper == null) {
            objectMapper = newObjectMapperWith(RowJsonDeserializer.forSchema(payloadSchema()));
        }
        return RowJsonUtils.jsonToRow(objectMapper, payloadJson);
    }

    /** AutoValue builder for configuring the conversion. */
    @AutoValue.Builder
    abstract static class Builder {
        public abstract Builder messageSchema(Schema messageSchema);

        public abstract Builder useDlq(boolean useDlq);

        public abstract Builder useFlatSchema(boolean useFlatSchema);

        public abstract PubsubMessageToRow build();
    }
}
Is the rule valid with no Condition or no Action? If so, should we let the user create it without calling these methods? That would avoid forcing the user to write `define...().withMatchConditions().withActions().attach()`.
/**
 * Sets the delivery-rule actions on the inner model, replacing any previously
 * configured actions.
 *
 * <p>A {@code null} varargs array is treated the same as an empty one: the
 * inner model ends up with an empty (but non-null) action list.
 *
 * @param actions the actions to apply when this rule matches; may be null
 * @return this rule, for chaining
 */
public CdnStandardRulesEngineRuleImpl withActions(DeliveryRuleAction... actions) {
    List<DeliveryRuleAction> deliveryActions = new ArrayList<>();
    if (actions != null) {
        for (DeliveryRuleAction action : actions) {
            deliveryActions.add(action);
        }
    }
    innerModel().withActions(deliveryActions);
    return this;
}
}
/**
 * Replaces the actions of this delivery rule with the given ones.
 *
 * <p>Passing {@code null} (or no arguments) clears the actions: the inner
 * model always receives a non-null list.
 *
 * @param actions actions to execute when the rule's conditions match; may be null
 * @return this rule, for chaining
 */
public CdnStandardRulesEngineRuleImpl withActions(DeliveryRuleAction... actions) {
    List<DeliveryRuleAction> collected =
            actions == null ? new ArrayList<>() : new ArrayList<>(Arrays.asList(actions));
    innerModel().withActions(collected);
    return this;
}
class CdnStandardRulesEngineRuleImpl extends ChildResourceImpl<DeliveryRule, CdnEndpointImpl, CdnEndpoint> implements CdnStandardRulesEngineRule, CdnStandardRulesEngineRule.Definition<CdnEndpointImpl>, CdnStandardRulesEngineRule.Update<CdnEndpointImpl> { CdnStandardRulesEngineRuleImpl(CdnEndpointImpl parent, String name) { this(parent, new DeliveryRule().withName(name)); } CdnStandardRulesEngineRuleImpl(CdnEndpointImpl parent, DeliveryRule deliveryRule) { super(deliveryRule, parent); } @Override public CdnStandardRulesEngineRuleImpl withOrder(int order) { innerModel().withOrder(order); return this; } @Override public CdnStandardRulesEngineRuleImpl withMatchConditions(DeliveryRuleCondition... matchConditions) { List<DeliveryRuleCondition> conditions = new ArrayList<>(); if (matchConditions != null) { conditions.addAll(Arrays.asList(matchConditions)); } innerModel().withConditions(conditions); return this; } @Override @Override public String name() { return innerModel().name(); } @Override public CdnEndpointImpl attach() { return parent(); } }
class CdnStandardRulesEngineRuleImpl extends ChildResourceImpl<DeliveryRule, CdnEndpointImpl, CdnEndpoint> implements CdnStandardRulesEngineRule, CdnStandardRulesEngineRule.Definition<CdnEndpointImpl>, CdnStandardRulesEngineRule.Update<CdnEndpointImpl> { CdnStandardRulesEngineRuleImpl(CdnEndpointImpl parent, String name) { this(parent, new DeliveryRule().withName(name)); } CdnStandardRulesEngineRuleImpl(CdnEndpointImpl parent, DeliveryRule deliveryRule) { super(deliveryRule, parent); } @Override public CdnStandardRulesEngineRuleImpl withOrder(int order) { innerModel().withOrder(order); return this; } @Override public CdnStandardRulesEngineRuleImpl withMatchConditions(DeliveryRuleCondition... matchConditions) { List<DeliveryRuleCondition> conditions = new ArrayList<>(); if (matchConditions != null) { conditions.addAll(Arrays.asList(matchConditions)); } innerModel().withConditions(conditions); return this; } @Override @Override public String name() { return innerModel().name(); } @Override public CdnEndpointImpl attach() { return parent(); } }
Shouldn't we restore the original TCCL (thread context class loader) after the command has run?
/**
 * Runs {@code command} with this wrapper's class loader installed as the
 * thread's context class loader (TCCL), restoring the previous TCCL afterwards.
 *
 * <p>Fix: the original set the TCCL and never restored it, leaking the
 * swapped-in class loader to all subsequent work on the calling thread. The
 * try/finally guarantees restoration even if the command throws.
 *
 * @param command the task to execute under the wrapped class loader
 */
public void run(Runnable command) {
    Thread currentThread = Thread.currentThread();
    ClassLoader previousTccl = currentThread.getContextClassLoader();
    currentThread.setContextClassLoader(classLoader);
    try {
        command.run();
    } finally {
        currentThread.setContextClassLoader(previousTccl);
    }
}
command.run();
/**
 * Executes {@code command} with this wrapper's class loader as the thread
 * context class loader, then puts the previous context class loader back —
 * even if the command throws.
 *
 * @param command the task to execute
 */
public void run(Runnable command) {
    final Thread thread = Thread.currentThread();
    final ClassLoader saved = thread.getContextClassLoader();
    thread.setContextClassLoader(classLoader);
    try {
        command.run();
    } finally {
        // Always undo the TCCL swap so the caller's thread is left untouched.
        thread.setContextClassLoader(saved);
    }
}
/**
 * Holds a class loader captured at construction time so that work can later be
 * executed with it installed as the thread context class loader.
 * (The run(...) method that uses this field lives elsewhere in the file.)
 */
class DevModeWrapper {

    // Class loader captured from the caller; presumably the dev-mode
    // deployment class loader — TODO confirm against call sites.
    private final ClassLoader classLoader;

    public DevModeWrapper(ClassLoader contextClassLoader) {
        classLoader = contextClassLoader;
    }
}
/**
 * Wraps a class loader so tasks can later be run with it as the thread's
 * context class loader.
 */
class DevModeWrapper {

    /** The class loader to install as the TCCL when running tasks. */
    private final ClassLoader classLoader;

    /**
     * @param contextClassLoader the class loader to capture
     */
    public DevModeWrapper(ClassLoader contextClassLoader) {
        this.classLoader = contextClassLoader;
    }
}
According to the documentation, I could do just this. Let me know what you think. ``` Path artifact = target.getOutputDirectory().resolve(name); File file = new File(artifact.toString()); file.setExecutable(true, true); ```
/**
 * Writes {@code output} to file {@code name} under the build output directory
 * (placeholder substitution is done by writeFile) and marks the resulting file
 * executable for its owner.
 *
 * <p>Fix: the original used {@code Files.setPosixFilePermissions}, which
 * throws {@code UnsupportedOperationException} on non-POSIX file systems
 * (e.g. Windows) and silently stripped any group/other read bits. Using
 * {@link java.io.File#setExecutable(boolean, boolean)} only flips the
 * executable bit and degrades gracefully where the bit is not supported.
 *
 * @param target build item that resolves the output directory
 * @param name   file name to create inside the output directory
 * @param output file contents to write
 * @throws IOException if the file cannot be written
 */
public static void writeExecutableFile(OutputTargetBuildItem target, String name, String output)
        throws IOException {
    writeFile(target, name, output);
    Path artifact = target.getOutputDirectory().resolve(name);
    // Best effort: setExecutable returns false where the bit cannot be set;
    // the script content was still written successfully above.
    artifact.toFile().setExecutable(true, true);
}
Files.setPosixFilePermissions(artifact, permissions);
/**
 * Writes {@code output} to {@code name} in the build output directory and
 * then marks the file owner-executable.
 *
 * @param target build item that resolves the output directory
 * @param name   file name to create inside the output directory
 * @param output file contents to write
 * @throws IOException if the file cannot be written
 */
public static void writeExecutableFile(OutputTargetBuildItem target, String name, String output)
        throws IOException {
    // Write the (placeholder-substituted) contents first, then flip the bit.
    writeFile(target, name, output);
    Path scriptPath = target.getOutputDirectory().resolve(name);
    scriptPath.toFile().setExecutable(true, true);
}
/**
 * Build-time helpers for generating AWS Lambda deployment artifacts: name
 * derivation, placeholder-substituted file writing, classpath-resource
 * copying, and script/SAM-template generation.
 *
 * NOTE(review): this fragment calls writeExecutableFile, which is defined
 * elsewhere in the class and not shown here.
 */
class LambdaUtil {

    /**
     * Strips period, dash, and numbers. Turns characters after to uppercase. i.e.
     * Also strips "-SNAPSHOT" from end of name.
     *
     * "foo.bar-1.0-SNAPSHOT" to "FooBar"
     *
     * @param basename
     * @return
     */
    public static String artifactToLambda(String basename) {
        if (basename.endsWith("-SNAPSHOT"))
            basename = basename.substring(0, basename.length() - "-SNAPSHOT".length());
        // "[^a-zA-Z]" is a split regex: every non-letter character (dots,
        // dashes, digits) becomes a word boundary.
        String name = convertToken(basename, "[^a-zA-Z]");
        return name.trim();
    }

    // Splits `basename` on the regex `token`, then re-joins the non-empty,
    // trimmed pieces with the first letter upper-cased and the remainder
    // lower-cased, yielding a CamelCase identifier.
    protected static String convertToken(String basename, String token) {
        String[] splits = basename.split(token);
        if (splits == null || splits.length == 0)
            return basename;
        String name = "";
        for (String split : splits) {
            split = split.trim();
            if (split.isEmpty())
                continue;
            name = name + split.substring(0, 1).toUpperCase() + split.substring(1).toLowerCase();
        }
        return name;
    }

    /**
     * Writes `output` to file `name` under the build output directory after
     * substituting the ${artifactId}, ${buildDir} and ${targetUri}
     * placeholders.
     *
     * The target URI points at function.zip in the output directory, with the
     * "file:" scheme rewritten to "fileb:" — presumably for the AWS CLI's
     * binary-file notation; TODO confirm.
     */
    public static void writeFile(OutputTargetBuildItem target, String name, String output) throws IOException {
        Path artifact = target.getOutputDirectory().resolve(name);
        String targetUri = target.getOutputDirectory().resolve("function.zip").toUri().toString().replace("file:", "fileb:");
        output = output.replace("${artifactId}", target.getBaseName())
                .replace("${buildDir}", target.getOutputDirectory().toString())
                .replace("${targetUri}", targetUri);
        Files.write(artifact, output.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Reads the named classpath resource (resolved via the thread context
     * class loader) fully into memory and returns it as a UTF-8 string.
     */
    public static String copyResource(String resource) throws Exception {
        try (InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource)) {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            int nRead;
            byte[] data = new byte[1024];
            while ((nRead = inputStream.read(data, 0, data.length)) != -1) {
                buffer.write(data, 0, nRead);
            }
            buffer.flush();
            byte[] byteArray = buffer.toByteArray();
            return new String(byteArray, StandardCharsets.UTF_8);
        }
    }

    /**
     * Generates the Lambda helper scripts (bootstrap-example.sh, manage.sh)
     * and SAM templates (sam.jvm.yaml, sam.native.yaml) into the build output
     * directory, substituting ${handler} and the derived ${lambdaName}.
     * Shell scripts are written via writeExecutableFile (defined elsewhere in
     * the class) so they end up executable.
     */
    public static void generateScripts(String handler, OutputTargetBuildItem target) throws Exception {
        String output = copyResource("lambda/bootstrap-example.sh");
        writeExecutableFile(target, "bootstrap-example.sh", output);
        String lambdaName = artifactToLambda(target.getBaseName());
        output = copyResource("lambda/manage.sh")
                .replace("${handler}", handler)
                .replace("${lambdaName}", lambdaName);
        writeExecutableFile(target, "manage.sh", output);
        output = copyResource("lambda/sam.jvm.yaml")
                .replace("${handler}", handler)
                .replace("${lambdaName}", lambdaName);
        writeFile(target, "sam.jvm.yaml", output);
        output = copyResource("lambda/sam.native.yaml")
                .replace("${lambdaName}", lambdaName);
        writeFile(target, "sam.native.yaml", output);
    }
}
class LambdaUtil { /** * Strips period, dash, and numbers. Turns characters after to uppercase. i.e. * Also strips "-SNAPSHOT" from end of name. * * "foo.bar-1.0-SNAPSHOT" to "FooBar" * * @param basename * @return */ public static String artifactToLambda(String basename) { if (basename.endsWith("-SNAPSHOT")) basename = basename.substring(0, basename.length() - "-SNAPSHOT".length()); String name = convertToken(basename, "[^a-zA-Z]"); return name.trim(); } protected static String convertToken(String basename, String token) { String[] splits = basename.split(token); if (splits == null || splits.length == 0) return basename; String name = ""; for (String split : splits) { split = split.trim(); if (split.isEmpty()) continue; name = name + split.substring(0, 1).toUpperCase() + split.substring(1).toLowerCase(); } return name; } public static void writeFile(OutputTargetBuildItem target, String name, String output) throws IOException { Path artifact = target.getOutputDirectory().resolve(name); String targetUri = target.getOutputDirectory().resolve("function.zip").toUri().toString().replace("file:", "fileb:"); output = output.replace("${artifactId}", target.getBaseName()) .replace("${buildDir}", target.getOutputDirectory().toString()) .replace("${targetUri}", targetUri); Files.write(artifact, output.getBytes(StandardCharsets.UTF_8)); } public static String copyResource(String resource) throws Exception { try (InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource)) { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); int nRead; byte[] data = new byte[1024]; while ((nRead = inputStream.read(data, 0, data.length)) != -1) { buffer.write(data, 0, nRead); } buffer.flush(); byte[] byteArray = buffer.toByteArray(); return new String(byteArray, StandardCharsets.UTF_8); } } public static void generateScripts(String handler, OutputTargetBuildItem target) throws Exception { String output = 
copyResource("lambda/bootstrap-example.sh"); writeExecutableFile(target, "bootstrap-example.sh", output); String lambdaName = artifactToLambda(target.getBaseName()); output = copyResource("lambda/manage.sh") .replace("${handler}", handler) .replace("${lambdaName}", lambdaName); writeExecutableFile(target, "manage.sh", output); output = copyResource("lambda/sam.jvm.yaml") .replace("${handler}", handler) .replace("${lambdaName}", lambdaName); writeFile(target, "sam.jvm.yaml", output); output = copyResource("lambda/sam.native.yaml") .replace("${lambdaName}", lambdaName); writeFile(target, "sam.native.yaml", output); } }
This comment seems out of place, no?
/**
 * Returns all URLs for resource {@code nm}, combining matches from this
 * loader's own class-path elements with the parent's matches — unless the
 * resource is banned from parent delegation.
 *
 * NOTE(review): delegation order and the banning rules below are
 * load-bearing; do not reorder.
 */
public Enumeration<URL> getResources(String nm) throws IOException {
    ClassLoaderState state = getState();
    String name = sanitizeName(nm);
    // Resources on the banned list are never delegated to the parent.
    boolean banned = state.bannedResources.contains(name);
    Set<URL> resources = new LinkedHashSet<>();
    if (name.startsWith(META_INF_SERVICES)) {
        try {
            // For a ServiceLoader descriptor, check who loaded the service
            // interface: if this loader owns it, the parent's provider files
            // would name classes from a different loader, so ban delegation.
            // TODO(review): confirm that is the intent of this check.
            Class<?> c = loadClass(name.substring(META_INF_SERVICES.length()));
            if (c.getClassLoader() == this) {
                banned = true;
            }
        } catch (ClassNotFoundException ignored) {
            // Not a loadable service interface — fall through, not banned here.
        }
    }
    // Local matches first (LinkedHashSet preserves this ordering and
    // de-duplicates URLs).
    for (ClassPathElement i : elements) {
        ClassPathResource res = i.getResource(nm);
        if (res != null) {
            resources.add(res.getUrl());
        }
    }
    if (!banned) {
        // Consult the parent only when nothing was found locally, or always
        // when parent-resource aggregation is enabled.
        if (resources.isEmpty() || aggregateParentResources) {
            Enumeration<URL> res = parent.getResources(nm);
            while (res.hasMoreElements()) {
                resources.add(res.nextElement());
            }
        }
    }
    return Collections.enumeration(resources);
}
public Enumeration<URL> getResources(String nm) throws IOException { ClassLoaderState state = getState(); String name = sanitizeName(nm); boolean banned = state.bannedResources.contains(name); Set<URL> resources = new LinkedHashSet<>(); if (name.startsWith(META_INF_SERVICES)) { try { Class<?> c = loadClass(name.substring(META_INF_SERVICES.length())); if (c.getClassLoader() == this) { banned = true; } } catch (ClassNotFoundException ignored) { } } for (ClassPathElement i : elements) { ClassPathResource res = i.getResource(nm); if (res != null) { resources.add(res.getUrl()); } } if (!banned) { if (resources.isEmpty() || aggregateParentResources) { Enumeration<URL> res = parent.getResources(nm); while (res.hasMoreElements()) { resources.add(res.nextElement()); } } } return Collections.enumeration(resources); }
class loading where it attempts to resolve stuff from the parent super(PLATFORM_CLASS_LOADER); this.name = builder.name; this.elements = builder.elements; this.bytecodeTransformers = builder.bytecodeTransformers; this.bannedElements = builder.bannedElements; this.parentFirstElements = builder.parentFirstElements; this.lesserPriorityElements = builder.lesserPriorityElements; this.parent = builder.parent; this.parentFirst = builder.parentFirst; this.resettableElement = builder.resettableElement; this.transformerClassLoader = builder.transformerClassLoader; this.aggregateParentResources = builder.aggregateParentResources; } public static Builder builder(String name, ClassLoader parent, boolean parentFirst) { return new Builder(name, parent, parentFirst); }
class loading where it attempts to resolve stuff from the parent super(PLATFORM_CLASS_LOADER); this.name = builder.name; this.elements = builder.elements; this.bytecodeTransformers = builder.bytecodeTransformers; this.bannedElements = builder.bannedElements; this.parentFirstElements = builder.parentFirstElements; this.lesserPriorityElements = builder.lesserPriorityElements; this.parent = builder.parent; this.parentFirst = builder.parentFirst; this.resettableElement = builder.resettableElement; this.transformerClassLoader = builder.transformerClassLoader; this.aggregateParentResources = builder.aggregateParentResources; } public static Builder builder(String name, ClassLoader parent, boolean parentFirst) { return new Builder(name, parent, parentFirst); }
Why do we need this exactly?
/**
 * Builds this actor's message dispatch table.
 *
 * <p>Handshake and control messages get dedicated handlers; every other
 * message falls through to the generic handleMessage. Each handler is
 * wrapped by withCleanContextClassLoader, which (per its definition elsewhere
 * in this file) runs the handler via
 * ClassLoadingUtils.runWithFlinkContextClassLoader so message processing does
 * not execute under the actor system's context class loader.
 *
 * <p>NOTE: matchAny is the catch-all and must remain the last match clause.
 */
public Receive createReceive() {
    return ReceiveBuilder.create()
            .match(
                    RemoteHandshakeMessage.class,
                    withCleanContextClassLoader(this::handleHandshakeMessage))
            .match(
                    ControlMessages.class,
                    withCleanContextClassLoader(this::handleControlMessage))
            .matchAny(withCleanContextClassLoader(this::handleMessage))
            .build();
}
.matchAny(withCleanContextClassLoader(this::handleMessage))
/**
 * Builds this actor's message dispatch table.
 *
 * <p>Handshake and control messages are routed to dedicated handlers; any
 * other message falls through to handleMessage. In this variant the handlers
 * run directly on the actor thread with no context-class-loader wrapping.
 *
 * <p>NOTE: matchAny is the catch-all and must remain the last match clause.
 */
public Receive createReceive() {
    return ReceiveBuilder.create()
            .match(RemoteHandshakeMessage.class, this::handleHandshakeMessage)
            .match(ControlMessages.class, this::handleControlMessage)
            .matchAny(this::handleMessage)
            .build();
}
class AkkaRpcActor<T extends RpcEndpoint & RpcGateway> extends AbstractActor { protected final Logger log = LoggerFactory.getLogger(getClass()); /** the endpoint to invoke the methods on. */ protected final T rpcEndpoint; /** the helper that tracks whether calls come from the main thread. */ private final MainThreadValidatorUtil mainThreadValidator; private final CompletableFuture<Boolean> terminationFuture; private final int version; private final long maximumFramesize; private final AtomicBoolean rpcEndpointStopped; private volatile RpcEndpointTerminationResult rpcEndpointTerminationResult; @Nonnull private State state; AkkaRpcActor( final T rpcEndpoint, final CompletableFuture<Boolean> terminationFuture, final int version, final long maximumFramesize) { checkArgument(maximumFramesize > 0, "Maximum framesize must be positive."); this.rpcEndpoint = checkNotNull(rpcEndpoint, "rpc endpoint"); this.mainThreadValidator = new MainThreadValidatorUtil(rpcEndpoint); this.terminationFuture = checkNotNull(terminationFuture); this.version = version; this.maximumFramesize = maximumFramesize; this.rpcEndpointStopped = new AtomicBoolean(false); this.rpcEndpointTerminationResult = RpcEndpointTerminationResult.failure( new AkkaRpcException( String.format( "RpcEndpoint %s has not been properly stopped.", rpcEndpoint.getEndpointId()))); this.state = StoppedState.STOPPED; } @Override public void postStop() throws Exception { super.postStop(); if (rpcEndpointTerminationResult.isSuccess()) { log.debug("The RpcEndpoint {} terminated successfully.", rpcEndpoint.getEndpointId()); terminationFuture.complete(null); } else { log.info( "The RpcEndpoint {} failed.", rpcEndpoint.getEndpointId(), rpcEndpointTerminationResult.getFailureCause()); terminationFuture.completeExceptionally(rpcEndpointTerminationResult.getFailureCause()); } state = state.finishTermination(); } @Override private static <X> FI.UnitApply<X> withCleanContextClassLoader(Consumer<X> function) { return object -> 
ClassLoadingUtils.runWithFlinkContextClassLoader(() -> function.accept(object)); } private void handleMessage(final Object message) { if (state.isRunning()) { mainThreadValidator.enterMainThread(); try { handleRpcMessage(message); } finally { mainThreadValidator.exitMainThread(); } } else { log.info( "The rpc endpoint {} has not been started yet. Discarding message {} until processing is started.", rpcEndpoint.getClass().getName(), message.getClass().getName()); sendErrorIfSender( new AkkaRpcException( String.format( "Discard message, because the rpc endpoint %s has not been started yet.", rpcEndpoint.getAddress()))); } } private void handleControlMessage(ControlMessages controlMessage) { try { switch (controlMessage) { case START: state = state.start(this); break; case STOP: state = state.stop(); break; case TERMINATE: state = state.terminate(this); break; default: handleUnknownControlMessage(controlMessage); } } catch (Exception e) { this.rpcEndpointTerminationResult = RpcEndpointTerminationResult.failure(e); throw e; } } private void handleUnknownControlMessage(ControlMessages controlMessage) { final String message = String.format( "Received unknown control message %s. Dropping this message!", controlMessage); log.warn(message); sendErrorIfSender(new AkkaUnknownMessageException(message)); } protected void handleRpcMessage(Object message) { if (message instanceof RunAsync) { handleRunAsync((RunAsync) message); } else if (message instanceof CallAsync) { handleCallAsync((CallAsync) message); } else if (message instanceof RpcInvocation) { handleRpcInvocation((RpcInvocation) message); } else { log.warn( "Received message of unknown type {} with value {}. 
Dropping this message!", message.getClass().getName(), message); sendErrorIfSender( new AkkaUnknownMessageException( "Received unknown message " + message + " of type " + message.getClass().getSimpleName() + '.')); } } private void handleHandshakeMessage(RemoteHandshakeMessage handshakeMessage) { if (!isCompatibleVersion(handshakeMessage.getVersion())) { sendErrorIfSender( new AkkaHandshakeException( String.format( "Version mismatch between source (%s) and target (%s) rpc component. Please verify that all components have the same version.", handshakeMessage.getVersion(), getVersion()))); } else if (!isGatewaySupported(handshakeMessage.getRpcGateway())) { sendErrorIfSender( new AkkaHandshakeException( String.format( "The rpc endpoint does not support the gateway %s.", handshakeMessage.getRpcGateway().getSimpleName()))); } else { getSender().tell(new Status.Success(HandshakeSuccessMessage.INSTANCE), getSelf()); } } private boolean isGatewaySupported(Class<?> rpcGateway) { return rpcGateway.isAssignableFrom(rpcEndpoint.getClass()); } private boolean isCompatibleVersion(int sourceVersion) { return sourceVersion == getVersion(); } private int getVersion() { return version; } /** * Handle rpc invocations by looking up the rpc method on the rpc endpoint and calling this * method with the provided method arguments. If the method has a return value, it is returned * to the sender of the call. 
* * @param rpcInvocation Rpc invocation message */ private void handleRpcInvocation(RpcInvocation rpcInvocation) { Method rpcMethod = null; try { String methodName = rpcInvocation.getMethodName(); Class<?>[] parameterTypes = rpcInvocation.getParameterTypes(); rpcMethod = lookupRpcMethod(methodName, parameterTypes); } catch (ClassNotFoundException e) { log.error("Could not load method arguments.", e); RpcConnectionException rpcException = new RpcConnectionException("Could not load method arguments.", e); getSender().tell(new Status.Failure(rpcException), getSelf()); } catch (IOException e) { log.error("Could not deserialize rpc invocation message.", e); RpcConnectionException rpcException = new RpcConnectionException("Could not deserialize rpc invocation message.", e); getSender().tell(new Status.Failure(rpcException), getSelf()); } catch (final NoSuchMethodException e) { log.error("Could not find rpc method for rpc invocation.", e); RpcConnectionException rpcException = new RpcConnectionException("Could not find rpc method for rpc invocation.", e); getSender().tell(new Status.Failure(rpcException), getSelf()); } if (rpcMethod != null) { try { rpcMethod.setAccessible(true); if (rpcMethod.getReturnType().equals(Void.TYPE)) { rpcMethod.invoke(rpcEndpoint, rpcInvocation.getArgs()); } else { final Object result; try { result = rpcMethod.invoke(rpcEndpoint, rpcInvocation.getArgs()); } catch (InvocationTargetException e) { log.debug( "Reporting back error thrown in remote procedure {}", rpcMethod, e); getSender().tell(new Status.Failure(e.getTargetException()), getSelf()); return; } final String methodName = rpcMethod.getName(); if (result instanceof CompletableFuture) { final CompletableFuture<?> responseFuture = (CompletableFuture<?>) result; sendAsyncResponse(responseFuture, methodName); } else { sendSyncResponse(result, methodName); } } } catch (Throwable e) { log.error("Error while executing remote procedure call {}.", rpcMethod, e); getSender().tell(new 
Status.Failure(e), getSelf()); } } } private void sendSyncResponse(Object response, String methodName) { if (isRemoteSender(getSender())) { Either<AkkaRpcSerializedValue, AkkaRpcException> serializedResult = serializeRemoteResultAndVerifySize(response, methodName); if (serializedResult.isLeft()) { getSender().tell(new Status.Success(serializedResult.left()), getSelf()); } else { getSender().tell(new Status.Failure(serializedResult.right()), getSelf()); } } else { getSender().tell(new Status.Success(response), getSelf()); } } private void sendAsyncResponse(CompletableFuture<?> asyncResponse, String methodName) { final ActorRef sender = getSender(); Promise.DefaultPromise<Object> promise = new Promise.DefaultPromise<>(); FutureUtils.assertNoException( asyncResponse.handle( (value, throwable) -> { if (throwable != null) { promise.failure(throwable); } else { if (isRemoteSender(sender)) { Either<AkkaRpcSerializedValue, AkkaRpcException> serializedResult = serializeRemoteResultAndVerifySize( value, methodName); if (serializedResult.isLeft()) { promise.success(serializedResult.left()); } else { promise.failure(serializedResult.right()); } } else { promise.success(new Status.Success(value)); } } return null; })); Patterns.pipe(promise.future(), getContext().dispatcher()).to(sender); } private boolean isRemoteSender(ActorRef sender) { return !sender.path().address().hasLocalScope(); } private Either<AkkaRpcSerializedValue, AkkaRpcException> serializeRemoteResultAndVerifySize( Object result, String methodName) { try { AkkaRpcSerializedValue serializedResult = AkkaRpcSerializedValue.valueOf(result); long resultSize = serializedResult.getSerializedDataLength(); if (resultSize > maximumFramesize) { return Either.Right( new AkkaRpcException( "The method " + methodName + "'s result size " + resultSize + " exceeds the maximum size " + maximumFramesize + " .")); } else { return Either.Left(serializedResult); } } catch (IOException e) { return Either.Right( new AkkaRpcException( 
"Failed to serialize the result for RPC call : " + methodName + '.', e)); } } /** * Handle asynchronous {@link Callable}. This method simply executes the given {@link Callable} * in the context of the actor thread. * * @param callAsync Call async message */ private void handleCallAsync(CallAsync callAsync) { try { Object result = callAsync.getCallable().call(); getSender().tell(new Status.Success(result), getSelf()); } catch (Throwable e) { getSender().tell(new Status.Failure(e), getSelf()); } } /** * Handle asynchronous {@link Runnable}. This method simply executes the given {@link Runnable} * in the context of the actor thread. * * @param runAsync Run async message */ private void handleRunAsync(RunAsync runAsync) { final long timeToRun = runAsync.getTimeNanos(); final long delayNanos; if (timeToRun == 0 || (delayNanos = timeToRun - System.nanoTime()) <= 0) { try { runAsync.getRunnable().run(); } catch (Throwable t) { log.error("Caught exception while executing runnable in main thread.", t); ExceptionUtils.rethrowIfFatalErrorOrOOM(t); } } else { FiniteDuration delay = new FiniteDuration(delayNanos, TimeUnit.NANOSECONDS); RunAsync message = new RunAsync(runAsync.getRunnable(), timeToRun); final Object envelopedSelfMessage = envelopeSelfMessage(message); getContext() .system() .scheduler() .scheduleOnce( delay, getSelf(), envelopedSelfMessage, getContext().dispatcher(), ActorRef.noSender()); } } /** * Look up the rpc method on the given {@link RpcEndpoint} instance. 
* * @param methodName Name of the method * @param parameterTypes Parameter types of the method * @return Method of the rpc endpoint * @throws NoSuchMethodException Thrown if the method with the given name and parameter types * cannot be found at the rpc endpoint */ private Method lookupRpcMethod(final String methodName, final Class<?>[] parameterTypes) throws NoSuchMethodException { return rpcEndpoint.getClass().getMethod(methodName, parameterTypes); } /** * Send throwable to sender if the sender is specified. * * @param throwable to send to the sender */ protected void sendErrorIfSender(Throwable throwable) { if (!getSender().equals(ActorRef.noSender())) { getSender().tell(new Status.Failure(throwable), getSelf()); } } /** * Hook to envelope self messages. * * @param message to envelope * @return enveloped message */ protected Object envelopeSelfMessage(Object message) { return message; } /** Stop the actor immediately. */ private void stop(RpcEndpointTerminationResult rpcEndpointTerminationResult) { if (rpcEndpointStopped.compareAndSet(false, true)) { this.rpcEndpointTerminationResult = rpcEndpointTerminationResult; getContext().stop(getSelf()); } } interface State { default State start(AkkaRpcActor<?> akkaRpcActor) { throw new AkkaRpcInvalidStateException( invalidStateTransitionMessage(StartedState.STARTED)); } default State stop() { throw new AkkaRpcInvalidStateException( invalidStateTransitionMessage(StoppedState.STOPPED)); } default State terminate(AkkaRpcActor<?> akkaRpcActor) { throw new AkkaRpcInvalidStateException( invalidStateTransitionMessage(TerminatingState.TERMINATING)); } default State finishTermination() { return TerminatedState.TERMINATED; } default boolean isRunning() { return false; } default String invalidStateTransitionMessage(State targetState) { return String.format( "AkkaRpcActor is currently in state %s and cannot go into state %s.", this, targetState); } } @SuppressWarnings("Singleton") enum StartedState implements State { STARTED; 
@Override public State start(AkkaRpcActor<?> akkaRpcActor) { return STARTED; } @Override public State stop() { return StoppedState.STOPPED; } @Override public State terminate(AkkaRpcActor<?> akkaRpcActor) { akkaRpcActor.mainThreadValidator.enterMainThread(); CompletableFuture<Void> terminationFuture; try { terminationFuture = akkaRpcActor.rpcEndpoint.internalCallOnStop(); } catch (Throwable t) { terminationFuture = FutureUtils.completedExceptionally( new AkkaRpcException( String.format( "Failure while stopping RpcEndpoint %s.", akkaRpcActor.rpcEndpoint.getEndpointId()), t)); } finally { akkaRpcActor.mainThreadValidator.exitMainThread(); } terminationFuture.whenComplete( (ignored, throwable) -> akkaRpcActor.stop(RpcEndpointTerminationResult.of(throwable))); return TerminatingState.TERMINATING; } @Override public boolean isRunning() { return true; } } @SuppressWarnings("Singleton") enum StoppedState implements State { STOPPED; @Override public State start(AkkaRpcActor<?> akkaRpcActor) { akkaRpcActor.mainThreadValidator.enterMainThread(); try { akkaRpcActor.rpcEndpoint.internalCallOnStart(); } catch (Throwable throwable) { akkaRpcActor.stop( RpcEndpointTerminationResult.failure( new AkkaRpcException( String.format( "Could not start RpcEndpoint %s.", akkaRpcActor.rpcEndpoint.getEndpointId()), throwable))); } finally { akkaRpcActor.mainThreadValidator.exitMainThread(); } return StartedState.STARTED; } @Override public State stop() { return STOPPED; } @Override public State terminate(AkkaRpcActor<?> akkaRpcActor) { akkaRpcActor.stop(RpcEndpointTerminationResult.success()); return TerminatingState.TERMINATING; } } @SuppressWarnings("Singleton") enum TerminatingState implements State { TERMINATING; @Override public State terminate(AkkaRpcActor<?> akkaRpcActor) { return TERMINATING; } @Override public boolean isRunning() { return true; } } enum TerminatedState implements State { TERMINATED } private static final class RpcEndpointTerminationResult { private static final 
RpcEndpointTerminationResult SUCCESS = new RpcEndpointTerminationResult(null); @Nullable private final Throwable failureCause; private RpcEndpointTerminationResult(@Nullable Throwable failureCause) { this.failureCause = failureCause; } public boolean isSuccess() { return failureCause == null; } public Throwable getFailureCause() { Preconditions.checkState(failureCause != null); return failureCause; } private static RpcEndpointTerminationResult success() { return SUCCESS; } private static RpcEndpointTerminationResult failure(Throwable failureCause) { return new RpcEndpointTerminationResult(failureCause); } private static RpcEndpointTerminationResult of(@Nullable Throwable failureCause) { if (failureCause == null) { return success(); } else { return failure(failureCause); } } } }
class AkkaRpcActor<T extends RpcEndpoint & RpcGateway> extends AbstractActor { protected final Logger log = LoggerFactory.getLogger(getClass()); /** the endpoint to invoke the methods on. */ protected final T rpcEndpoint; private final ClassLoader flinkClassLoader; /** the helper that tracks whether calls come from the main thread. */ private final MainThreadValidatorUtil mainThreadValidator; private final CompletableFuture<Boolean> terminationFuture; private final int version; private final long maximumFramesize; private final AtomicBoolean rpcEndpointStopped; private volatile RpcEndpointTerminationResult rpcEndpointTerminationResult; @Nonnull private State state; AkkaRpcActor( final T rpcEndpoint, final CompletableFuture<Boolean> terminationFuture, final int version, final long maximumFramesize, final ClassLoader flinkClassLoader) { checkArgument(maximumFramesize > 0, "Maximum framesize must be positive."); this.rpcEndpoint = checkNotNull(rpcEndpoint, "rpc endpoint"); this.flinkClassLoader = checkNotNull(flinkClassLoader); this.mainThreadValidator = new MainThreadValidatorUtil(rpcEndpoint); this.terminationFuture = checkNotNull(terminationFuture); this.version = version; this.maximumFramesize = maximumFramesize; this.rpcEndpointStopped = new AtomicBoolean(false); this.rpcEndpointTerminationResult = RpcEndpointTerminationResult.failure( new AkkaRpcException( String.format( "RpcEndpoint %s has not been properly stopped.", rpcEndpoint.getEndpointId()))); this.state = StoppedState.STOPPED; } @Override public void postStop() throws Exception { super.postStop(); if (rpcEndpointTerminationResult.isSuccess()) { log.debug("The RpcEndpoint {} terminated successfully.", rpcEndpoint.getEndpointId()); terminationFuture.complete(null); } else { log.info( "The RpcEndpoint {} failed.", rpcEndpoint.getEndpointId(), rpcEndpointTerminationResult.getFailureCause()); terminationFuture.completeExceptionally(rpcEndpointTerminationResult.getFailureCause()); } state = 
state.finishTermination(); } @Override private void handleMessage(final Object message) { if (state.isRunning()) { mainThreadValidator.enterMainThread(); try { handleRpcMessage(message); } finally { mainThreadValidator.exitMainThread(); } } else { log.info( "The rpc endpoint {} has not been started yet. Discarding message {} until processing is started.", rpcEndpoint.getClass().getName(), message.getClass().getName()); sendErrorIfSender( new AkkaRpcException( String.format( "Discard message, because the rpc endpoint %s has not been started yet.", rpcEndpoint.getAddress()))); } } private void handleControlMessage(ControlMessages controlMessage) { try { switch (controlMessage) { case START: state = state.start(this, flinkClassLoader); break; case STOP: state = state.stop(); break; case TERMINATE: state = state.terminate(this, flinkClassLoader); break; default: handleUnknownControlMessage(controlMessage); } } catch (Exception e) { this.rpcEndpointTerminationResult = RpcEndpointTerminationResult.failure(e); throw e; } } private void handleUnknownControlMessage(ControlMessages controlMessage) { final String message = String.format( "Received unknown control message %s. Dropping this message!", controlMessage); log.warn(message); sendErrorIfSender(new AkkaUnknownMessageException(message)); } protected void handleRpcMessage(Object message) { if (message instanceof RunAsync) { handleRunAsync((RunAsync) message); } else if (message instanceof CallAsync) { handleCallAsync((CallAsync) message); } else if (message instanceof RpcInvocation) { handleRpcInvocation((RpcInvocation) message); } else { log.warn( "Received message of unknown type {} with value {}. 
Dropping this message!", message.getClass().getName(), message); sendErrorIfSender( new AkkaUnknownMessageException( "Received unknown message " + message + " of type " + message.getClass().getSimpleName() + '.')); } } private void handleHandshakeMessage(RemoteHandshakeMessage handshakeMessage) { if (!isCompatibleVersion(handshakeMessage.getVersion())) { sendErrorIfSender( new AkkaHandshakeException( String.format( "Version mismatch between source (%s) and target (%s) rpc component. Please verify that all components have the same version.", handshakeMessage.getVersion(), getVersion()))); } else if (!isGatewaySupported(handshakeMessage.getRpcGateway())) { sendErrorIfSender( new AkkaHandshakeException( String.format( "The rpc endpoint does not support the gateway %s.", handshakeMessage.getRpcGateway().getSimpleName()))); } else { getSender().tell(new Status.Success(HandshakeSuccessMessage.INSTANCE), getSelf()); } } private boolean isGatewaySupported(Class<?> rpcGateway) { return rpcGateway.isAssignableFrom(rpcEndpoint.getClass()); } private boolean isCompatibleVersion(int sourceVersion) { return sourceVersion == getVersion(); } private int getVersion() { return version; } /** * Handle rpc invocations by looking up the rpc method on the rpc endpoint and calling this * method with the provided method arguments. If the method has a return value, it is returned * to the sender of the call. 
* * @param rpcInvocation Rpc invocation message */ private void handleRpcInvocation(RpcInvocation rpcInvocation) { Method rpcMethod = null; try { String methodName = rpcInvocation.getMethodName(); Class<?>[] parameterTypes = rpcInvocation.getParameterTypes(); rpcMethod = lookupRpcMethod(methodName, parameterTypes); } catch (ClassNotFoundException e) { log.error("Could not load method arguments.", e); RpcConnectionException rpcException = new RpcConnectionException("Could not load method arguments.", e); getSender().tell(new Status.Failure(rpcException), getSelf()); } catch (IOException e) { log.error("Could not deserialize rpc invocation message.", e); RpcConnectionException rpcException = new RpcConnectionException("Could not deserialize rpc invocation message.", e); getSender().tell(new Status.Failure(rpcException), getSelf()); } catch (final NoSuchMethodException e) { log.error("Could not find rpc method for rpc invocation.", e); RpcConnectionException rpcException = new RpcConnectionException("Could not find rpc method for rpc invocation.", e); getSender().tell(new Status.Failure(rpcException), getSelf()); } if (rpcMethod != null) { try { rpcMethod.setAccessible(true); final Method capturedRpcMethod = rpcMethod; if (rpcMethod.getReturnType().equals(Void.TYPE)) { runWithContextClassLoader( () -> capturedRpcMethod.invoke(rpcEndpoint, rpcInvocation.getArgs()), flinkClassLoader); } else { final Object result; try { result = runWithContextClassLoader( () -> capturedRpcMethod.invoke( rpcEndpoint, rpcInvocation.getArgs()), flinkClassLoader); } catch (InvocationTargetException e) { log.debug( "Reporting back error thrown in remote procedure {}", rpcMethod, e); getSender().tell(new Status.Failure(e.getTargetException()), getSelf()); return; } final String methodName = rpcMethod.getName(); if (result instanceof CompletableFuture) { final CompletableFuture<?> responseFuture = (CompletableFuture<?>) result; sendAsyncResponse(responseFuture, methodName); } else { 
sendSyncResponse(result, methodName); } } } catch (Throwable e) { log.error("Error while executing remote procedure call {}.", rpcMethod, e); getSender().tell(new Status.Failure(e), getSelf()); } } } private void sendSyncResponse(Object response, String methodName) { if (isRemoteSender(getSender())) { Either<AkkaRpcSerializedValue, AkkaRpcException> serializedResult = serializeRemoteResultAndVerifySize(response, methodName); if (serializedResult.isLeft()) { getSender().tell(new Status.Success(serializedResult.left()), getSelf()); } else { getSender().tell(new Status.Failure(serializedResult.right()), getSelf()); } } else { getSender().tell(new Status.Success(response), getSelf()); } } private void sendAsyncResponse(CompletableFuture<?> asyncResponse, String methodName) { final ActorRef sender = getSender(); Promise.DefaultPromise<Object> promise = new Promise.DefaultPromise<>(); FutureUtils.assertNoException( asyncResponse.handle( (value, throwable) -> { if (throwable != null) { promise.failure(throwable); } else { if (isRemoteSender(sender)) { Either<AkkaRpcSerializedValue, AkkaRpcException> serializedResult = serializeRemoteResultAndVerifySize( value, methodName); if (serializedResult.isLeft()) { promise.success(serializedResult.left()); } else { promise.failure(serializedResult.right()); } } else { promise.success(new Status.Success(value)); } } return null; })); Patterns.pipe(promise.future(), getContext().dispatcher()).to(sender); } private boolean isRemoteSender(ActorRef sender) { return !sender.path().address().hasLocalScope(); } private Either<AkkaRpcSerializedValue, AkkaRpcException> serializeRemoteResultAndVerifySize( Object result, String methodName) { try { AkkaRpcSerializedValue serializedResult = AkkaRpcSerializedValue.valueOf(result); long resultSize = serializedResult.getSerializedDataLength(); if (resultSize > maximumFramesize) { return Either.Right( new AkkaRpcException( "The method " + methodName + "'s result size " + resultSize + " exceeds the 
maximum size " + maximumFramesize + " .")); } else { return Either.Left(serializedResult); } } catch (IOException e) { return Either.Right( new AkkaRpcException( "Failed to serialize the result for RPC call : " + methodName + '.', e)); } } /** * Handle asynchronous {@link Callable}. This method simply executes the given {@link Callable} * in the context of the actor thread. * * @param callAsync Call async message */ private void handleCallAsync(CallAsync callAsync) { try { Object result = runWithContextClassLoader( () -> callAsync.getCallable().call(), flinkClassLoader); getSender().tell(new Status.Success(result), getSelf()); } catch (Throwable e) { getSender().tell(new Status.Failure(e), getSelf()); } } /** * Handle asynchronous {@link Runnable}. This method simply executes the given {@link Runnable} * in the context of the actor thread. * * @param runAsync Run async message */ private void handleRunAsync(RunAsync runAsync) { final long timeToRun = runAsync.getTimeNanos(); final long delayNanos; if (timeToRun == 0 || (delayNanos = timeToRun - System.nanoTime()) <= 0) { try { runWithContextClassLoader(() -> runAsync.getRunnable().run(), flinkClassLoader); } catch (Throwable t) { log.error("Caught exception while executing runnable in main thread.", t); ExceptionUtils.rethrowIfFatalErrorOrOOM(t); } } else { FiniteDuration delay = new FiniteDuration(delayNanos, TimeUnit.NANOSECONDS); RunAsync message = new RunAsync(runAsync.getRunnable(), timeToRun); final Object envelopedSelfMessage = envelopeSelfMessage(message); getContext() .system() .scheduler() .scheduleOnce( delay, getSelf(), envelopedSelfMessage, getContext().dispatcher(), ActorRef.noSender()); } } /** * Look up the rpc method on the given {@link RpcEndpoint} instance. 
* * @param methodName Name of the method * @param parameterTypes Parameter types of the method * @return Method of the rpc endpoint * @throws NoSuchMethodException Thrown if the method with the given name and parameter types * cannot be found at the rpc endpoint */ private Method lookupRpcMethod(final String methodName, final Class<?>[] parameterTypes) throws NoSuchMethodException { return rpcEndpoint.getClass().getMethod(methodName, parameterTypes); } /** * Send throwable to sender if the sender is specified. * * @param throwable to send to the sender */ protected void sendErrorIfSender(Throwable throwable) { if (!getSender().equals(ActorRef.noSender())) { getSender().tell(new Status.Failure(throwable), getSelf()); } } /** * Hook to envelope self messages. * * @param message to envelope * @return enveloped message */ protected Object envelopeSelfMessage(Object message) { return message; } /** Stop the actor immediately. */ private void stop(RpcEndpointTerminationResult rpcEndpointTerminationResult) { if (rpcEndpointStopped.compareAndSet(false, true)) { this.rpcEndpointTerminationResult = rpcEndpointTerminationResult; getContext().stop(getSelf()); } } interface State { default State start(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { throw new AkkaRpcInvalidStateException( invalidStateTransitionMessage(StartedState.STARTED)); } default State stop() { throw new AkkaRpcInvalidStateException( invalidStateTransitionMessage(StoppedState.STOPPED)); } default State terminate(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { throw new AkkaRpcInvalidStateException( invalidStateTransitionMessage(TerminatingState.TERMINATING)); } default State finishTermination() { return TerminatedState.TERMINATED; } default boolean isRunning() { return false; } default String invalidStateTransitionMessage(State targetState) { return String.format( "AkkaRpcActor is currently in state %s and cannot go into state %s.", this, targetState); } } 
@SuppressWarnings("Singleton") enum StartedState implements State { STARTED; @Override public State start(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { return STARTED; } @Override public State stop() { return StoppedState.STOPPED; } @Override public State terminate(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { akkaRpcActor.mainThreadValidator.enterMainThread(); CompletableFuture<Void> terminationFuture; try { terminationFuture = runWithContextClassLoader( () -> akkaRpcActor.rpcEndpoint.internalCallOnStop(), flinkClassLoader); } catch (Throwable t) { terminationFuture = FutureUtils.completedExceptionally( new AkkaRpcException( String.format( "Failure while stopping RpcEndpoint %s.", akkaRpcActor.rpcEndpoint.getEndpointId()), t)); } finally { akkaRpcActor.mainThreadValidator.exitMainThread(); } terminationFuture.whenComplete( (ignored, throwable) -> akkaRpcActor.stop(RpcEndpointTerminationResult.of(throwable))); return TerminatingState.TERMINATING; } @Override public boolean isRunning() { return true; } } @SuppressWarnings("Singleton") enum StoppedState implements State { STOPPED; @Override public State start(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { akkaRpcActor.mainThreadValidator.enterMainThread(); try { runWithContextClassLoader( () -> akkaRpcActor.rpcEndpoint.internalCallOnStart(), flinkClassLoader); } catch (Throwable throwable) { akkaRpcActor.stop( RpcEndpointTerminationResult.failure( new AkkaRpcException( String.format( "Could not start RpcEndpoint %s.", akkaRpcActor.rpcEndpoint.getEndpointId()), throwable))); } finally { akkaRpcActor.mainThreadValidator.exitMainThread(); } return StartedState.STARTED; } @Override public State stop() { return STOPPED; } @Override public State terminate(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { akkaRpcActor.stop(RpcEndpointTerminationResult.success()); return TerminatingState.TERMINATING; } } @SuppressWarnings("Singleton") enum TerminatingState implements 
State { TERMINATING; @Override public State terminate(AkkaRpcActor<?> akkaRpcActor, ClassLoader flinkClassLoader) { return TERMINATING; } @Override public boolean isRunning() { return true; } } enum TerminatedState implements State { TERMINATED } private static final class RpcEndpointTerminationResult { private static final RpcEndpointTerminationResult SUCCESS = new RpcEndpointTerminationResult(null); @Nullable private final Throwable failureCause; private RpcEndpointTerminationResult(@Nullable Throwable failureCause) { this.failureCause = failureCause; } public boolean isSuccess() { return failureCause == null; } public Throwable getFailureCause() { Preconditions.checkState(failureCause != null); return failureCause; } private static RpcEndpointTerminationResult success() { return SUCCESS; } private static RpcEndpointTerminationResult failure(Throwable failureCause) { return new RpcEndpointTerminationResult(failureCause); } private static RpcEndpointTerminationResult of(@Nullable Throwable failureCause) { if (failureCause == null) { return success(); } else { return failure(failureCause); } } } }
I see 2 paths above for assigning to `from`: ``` String from = String.format("%s/v%s/%s.zip", GITHUB_DOWNLOAD_PREFIX, getSDKVersion(), buildFileName()); if (!Strings.isNullOrEmpty(options.getPrismLocation())) { checkArgument( !options.getPrismLocation().startsWith(GITHUB_TAG_PREFIX), "Provided --prismLocation URL is not an Apache Beam Github " + "Release page URL or download URL: ", from); from = options.getPrismLocation(); } ``` In either of these assignments are followed, won't `from` start with either `GITHUB_DOWNLOAD_PREFIX` or `GITHUB_TAG_PREFIX` which both start with `https://`?
String resolve() throws IOException { String from = String.format("%s/v%s/%s.zip", GITHUB_DOWNLOAD_PREFIX, getSDKVersion(), buildFileName()); if (!Strings.isNullOrEmpty(options.getPrismLocation())) { checkArgument( !options.getPrismLocation().startsWith(GITHUB_TAG_PREFIX), "Provided --prismLocation URL is not an Apache Beam Github " + "Release page URL or download URL: ", from); from = options.getPrismLocation(); } String fromFileName = getNameWithoutExtension(from); Path to = Paths.get(userHome(), PRISM_BIN_PATH, fromFileName); if (Files.exists(to)) { return to.toString(); } createDirectoryIfNeeded(to); if (from.startsWith("http")) { String result = resolve(new URL(from), to); checkState(Files.exists(to), "Resolved location does not exist: %s", result); return result; } String result = resolve(Paths.get(from), to); checkState(Files.exists(to), "Resolved location does not exist: %s", result); return result; }
if (from.startsWith("http")) {
String resolve() throws IOException { String from = String.format("%s/v%s/%s.zip", GITHUB_DOWNLOAD_PREFIX, getSDKVersion(), buildFileName()); if (!Strings.isNullOrEmpty(options.getPrismLocation())) { checkArgument( !options.getPrismLocation().startsWith(GITHUB_TAG_PREFIX), "Provided --prismLocation URL is not an Apache Beam Github " + "Release page URL or download URL: ", from); from = options.getPrismLocation(); } String fromFileName = getNameWithoutExtension(from); Path to = Paths.get(userHome(), PRISM_BIN_PATH, fromFileName); if (Files.exists(to)) { return to.toString(); } createDirectoryIfNeeded(to); if (from.startsWith("http")) { String result = resolve(new URL(from), to); checkState(Files.exists(to), "Resolved location does not exist: %s", result); return result; } String result = resolve(Paths.get(from), to); checkState(Files.exists(to), "Resolved location does not exist: %s", result); return result; }
class PrismLocator { static final String OS_NAME_PROPERTY = "os.name"; static final String ARCH_PROPERTY = "os.arch"; static final String USER_HOME_PROPERTY = "user.home"; private static final String ZIP_EXT = "zip"; private static final String SHA512_EXT = "sha512"; private static final ReleaseInfo RELEASE_INFO = ReleaseInfo.getReleaseInfo(); private static final String PRISM_BIN_PATH = ".apache_beam/cache/prism/bin"; private static final Set<PosixFilePermission> PERMS = PosixFilePermissions.fromString("rwxr-xr-x"); private static final String GITHUB_DOWNLOAD_PREFIX = "https: private static final String GITHUB_TAG_PREFIX = "https: private final PrismPipelineOptions options; PrismLocator(PrismPipelineOptions options) { this.options = options; } /** * Downloads and prepares a Prism executable for use with the {@link PrismRunner}, executed by the * {@link PrismExecutor}. The returned {@link String} is the absolute path to the Prism * executable. */ private String resolve(URL from, Path to) throws IOException { if (from.toString().startsWith(GITHUB_DOWNLOAD_PREFIX)) { URL shaSumReference = new URL(from + "." 
+ SHA512_EXT); validateShaSum512(shaSumReference, from); } BiConsumer<URL, Path> downloadFn = PrismLocator::download; if (from.getPath().endsWith(ZIP_EXT)) { downloadFn = PrismLocator::unzip; } downloadFn.accept(from, to); Files.setPosixFilePermissions(to, PERMS); return to.toString(); } private String resolve(Path from, Path to) throws IOException { BiConsumer<InputStream, Path> copyFn = PrismLocator::copy; if (from.endsWith(ZIP_EXT)) { copyFn = PrismLocator::unzip; } copyFn.accept(from.toUri().toURL().openStream(), to); ByteStreams.copy(from.toUri().toURL().openStream(), Files.newOutputStream(to)); Files.setPosixFilePermissions(to, PERMS); return to.toString(); } String buildFileName() { String version = getSDKVersion(); return String.format("apache_beam-v%s-prism-%s-%s", version, os(), arch()); } private static void unzip(URL from, Path to) { try { unzip(from.openStream(), to); } catch (IOException e) { throw new RuntimeException(e); } } private static void unzip(InputStream from, Path to) { try (OutputStream out = Files.newOutputStream(to)) { ZipInputStream zis = new ZipInputStream(from); for (ZipEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) { InputStream in = ByteStreams.limit(zis, entry.getSize()); ByteStreams.copy(in, out); } } catch (IOException e) { throw new RuntimeException(e); } } private static void copy(InputStream from, Path to) { try { ByteStreams.copy(from, Files.newOutputStream(to)); } catch (IOException e) { throw new RuntimeException(e); } } private static void download(URL from, Path to) { try { ByteStreams.copy(from.openStream(), Files.newOutputStream(to)); } catch (IOException e) { throw new RuntimeException(e); } } private static void validateShaSum512(URL shaSumReference, URL source) throws IOException { try (InputStream in = shaSumReference.openStream()) { String rawContent = new String(ByteStreams.toByteArray(in), StandardCharsets.UTF_8); checkState(!Strings.isNullOrEmpty(rawContent)); String reference = ""; 
Iterator<String> split = Splitter.onPattern("\\s+").split(rawContent).iterator(); if (split.hasNext()) { reference = split.next(); } checkState(!Strings.isNullOrEmpty(reference)); HashCode toVerify = Hashing.sha512().hashBytes(ByteStreams.toByteArray(source.openStream())); checkState( reference.equals(toVerify.toString()), "Expected sha512 derived from: %s does not equal expected: %s, got: %s", source, reference, toVerify.toString()); } } private static String getNameWithoutExtension(String path) { return org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.io.Files .getNameWithoutExtension(path); } private String getSDKVersion() { if (Strings.isNullOrEmpty(options.getPrismVersionOverride())) { return RELEASE_INFO.getSdkVersion(); } return options.getPrismVersionOverride(); } private static String os() { String result = mustGetPropertyAsLowerCase(OS_NAME_PROPERTY); if (result.contains("mac")) { return "darwin"; } return result; } private static String arch() { String result = mustGetPropertyAsLowerCase(ARCH_PROPERTY); if (result.contains("aarch")) { return "arm64"; } return result; } private static String userHome() { return mustGetPropertyAsLowerCase(USER_HOME_PROPERTY); } private static String mustGetPropertyAsLowerCase(String name) { return checkStateNotNull(System.getProperty(name), "System property: " + name + " not set") .toLowerCase(); } private static void createDirectoryIfNeeded(Path path) throws IOException { Path parent = path.getParent(); if (parent == null) { return; } if (parent.toFile().exists()) { return; } Files.createDirectories(parent); } }
class PrismLocator { static final String OS_NAME_PROPERTY = "os.name"; static final String ARCH_PROPERTY = "os.arch"; static final String USER_HOME_PROPERTY = "user.home"; private static final String ZIP_EXT = "zip"; private static final ReleaseInfo RELEASE_INFO = ReleaseInfo.getReleaseInfo(); private static final String PRISM_BIN_PATH = ".apache_beam/cache/prism/bin"; private static final Set<PosixFilePermission> PERMS = PosixFilePermissions.fromString("rwxr-xr-x"); private static final String GITHUB_DOWNLOAD_PREFIX = "https: private static final String GITHUB_TAG_PREFIX = "https: private final PrismPipelineOptions options; PrismLocator(PrismPipelineOptions options) { this.options = options; } /** * Downloads and prepares a Prism executable for use with the {@link PrismRunner}. The returned * {@link String} is the absolute path to the Prism executable. */ static Path prismBinDirectory() { return Paths.get(userHome(), PRISM_BIN_PATH); } private String resolve(URL from, Path to) throws IOException { BiConsumer<URL, Path> downloadFn = PrismLocator::download; if (from.getPath().endsWith(ZIP_EXT)) { downloadFn = PrismLocator::unzip; } downloadFn.accept(from, to); Files.setPosixFilePermissions(to, PERMS); return to.toString(); } private String resolve(Path from, Path to) throws IOException { BiConsumer<InputStream, Path> copyFn = PrismLocator::copy; if (from.endsWith(ZIP_EXT)) { copyFn = PrismLocator::unzip; } copyFn.accept(from.toUri().toURL().openStream(), to); ByteStreams.copy(from.toUri().toURL().openStream(), Files.newOutputStream(to)); Files.setPosixFilePermissions(to, PERMS); return to.toString(); } String buildFileName() { String version = getSDKVersion(); return String.format("apache_beam-v%s-prism-%s-%s", version, os(), arch()); } private static void unzip(URL from, Path to) { try { unzip(from.openStream(), to); } catch (IOException e) { throw new RuntimeException(e); } } private static void unzip(InputStream from, Path to) { try (OutputStream out = 
Files.newOutputStream(to)) { ZipInputStream zis = new ZipInputStream(from); for (ZipEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) { InputStream in = ByteStreams.limit(zis, entry.getSize()); ByteStreams.copy(in, out); } } catch (IOException e) { throw new RuntimeException(e); } } private static void copy(InputStream from, Path to) { try { ByteStreams.copy(from, Files.newOutputStream(to)); } catch (IOException e) { throw new RuntimeException(e); } } private static void download(URL from, Path to) { try { ByteStreams.copy(from.openStream(), Files.newOutputStream(to)); } catch (IOException e) { throw new RuntimeException(e); } } private static String getNameWithoutExtension(String path) { return org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.io.Files .getNameWithoutExtension(path); } private String getSDKVersion() { if (Strings.isNullOrEmpty(options.getPrismVersionOverride())) { return RELEASE_INFO.getSdkVersion(); } return options.getPrismVersionOverride(); } private static String os() { String result = mustGetPropertyAsLowerCase(OS_NAME_PROPERTY); if (result.contains("mac")) { return "darwin"; } return result; } private static String arch() { String result = mustGetPropertyAsLowerCase(ARCH_PROPERTY); if (result.contains("aarch")) { return "arm64"; } return result; } private static String userHome() { return mustGetPropertyAsLowerCase(USER_HOME_PROPERTY); } private static String mustGetPropertyAsLowerCase(String name) { return checkStateNotNull(System.getProperty(name), "System property: " + name + " not set") .toLowerCase(); } private static void createDirectoryIfNeeded(Path path) throws IOException { Path parent = path.getParent(); if (parent == null) { return; } Files.createDirectories(parent); } }
I don't think the comment `// this is not used any more in the new scheduler` is needed. It would not be consistent to comment here because there is another invocation of this constructor where we do not comment. I would remove the parameters `slotProvider` and `slotRequestTimeout` from the `DefaultScheduler`, and if possible pass a _"throwing"_ implementation/invalid value to `SchedulerBase`.
public DefaultScheduler build() throws Exception { return new DefaultScheduler( log, jobGraph, backPressureStatsTracker, ioExecutor, jobMasterConfiguration, new SimpleSlotProvider(jobGraph.getJobID(), 0), futureExecutor, delayExecutor, userCodeLoader, checkpointRecoveryFactory, rpcTimeout, blobWriter, jobManagerJobMetricGroup, DEFAULT_TIMEOUT, shuffleMaster, partitionTracker, schedulingStrategyFactory, failoverStrategyFactory, restartBackoffTimeStrategy, executionVertexOperations, executionVertexVersioner, executionSlotAllocatorFactory); }
new SimpleSlotProvider(jobGraph.getJobID(), 0),
public DefaultScheduler build() throws Exception { return new DefaultScheduler( log, jobGraph, backPressureStatsTracker, ioExecutor, jobMasterConfiguration, futureExecutor, delayExecutor, userCodeLoader, checkpointRecoveryFactory, rpcTimeout, blobWriter, jobManagerJobMetricGroup, shuffleMaster, partitionTracker, schedulingStrategyFactory, failoverStrategyFactory, restartBackoffTimeStrategy, executionVertexOperations, executionVertexVersioner, executionSlotAllocatorFactory); }
class DefaultSchedulerBuilder { private final JobGraph jobGraph; private SchedulingStrategyFactory schedulingStrategyFactory; private Logger log = LOG; private BackPressureStatsTracker backPressureStatsTracker = VoidBackPressureStatsTracker.INSTANCE; private Executor ioExecutor = java.util.concurrent.Executors.newSingleThreadExecutor(); private Configuration jobMasterConfiguration = new Configuration(); private ScheduledExecutorService futureExecutor = new DirectScheduledExecutorService(); private ScheduledExecutor delayExecutor = new ScheduledExecutorServiceAdapter(futureExecutor); private ClassLoader userCodeLoader = getClass().getClassLoader(); private CheckpointRecoveryFactory checkpointRecoveryFactory = new StandaloneCheckpointRecoveryFactory(); private Time rpcTimeout = DEFAULT_TIMEOUT; private BlobWriter blobWriter = VoidBlobWriter.getInstance(); private JobManagerJobMetricGroup jobManagerJobMetricGroup = UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup(); private ShuffleMaster<?> shuffleMaster = NettyShuffleMaster.INSTANCE; private JobMasterPartitionTracker partitionTracker = NoOpJobMasterPartitionTracker.INSTANCE; private FailoverStrategy.Factory failoverStrategyFactory = new RestartPipelinedRegionFailoverStrategy.Factory(); private RestartBackoffTimeStrategy restartBackoffTimeStrategy = NoRestartBackoffTimeStrategy.INSTANCE; private ExecutionVertexOperations executionVertexOperations = new DefaultExecutionVertexOperations(); private ExecutionVertexVersioner executionVertexVersioner = new ExecutionVertexVersioner(); private ExecutionSlotAllocatorFactory executionSlotAllocatorFactory = new TestExecutionSlotAllocatorFactory(); private DefaultSchedulerBuilder(final JobGraph jobGraph) { this.jobGraph = jobGraph; this.schedulingStrategyFactory = DefaultSchedulerFactory.createSchedulingStrategyFactory(jobGraph.getScheduleMode()); } public DefaultSchedulerBuilder setLogger(final Logger log) { this.log = log; return this; } public 
DefaultSchedulerBuilder setBackPressureStatsTracker(final BackPressureStatsTracker backPressureStatsTracker) { this.backPressureStatsTracker = backPressureStatsTracker; return this; } public DefaultSchedulerBuilder setIoExecutor(final Executor ioExecutor) { this.ioExecutor = ioExecutor; return this; } public DefaultSchedulerBuilder setJobMasterConfiguration(final Configuration jobMasterConfiguration) { this.jobMasterConfiguration = jobMasterConfiguration; return this; } public DefaultSchedulerBuilder setFutureExecutor(final ScheduledExecutorService futureExecutor) { this.futureExecutor = futureExecutor; return this; } public DefaultSchedulerBuilder setDelayExecutor(final ScheduledExecutor delayExecutor) { this.delayExecutor = delayExecutor; return this; } public DefaultSchedulerBuilder setUserCodeLoader(final ClassLoader userCodeLoader) { this.userCodeLoader = userCodeLoader; return this; } public DefaultSchedulerBuilder setCheckpointRecoveryFactory(final CheckpointRecoveryFactory checkpointRecoveryFactory) { this.checkpointRecoveryFactory = checkpointRecoveryFactory; return this; } public DefaultSchedulerBuilder setRpcTimeout(final Time rpcTimeout) { this.rpcTimeout = rpcTimeout; return this; } public DefaultSchedulerBuilder setBlobWriter(final BlobWriter blobWriter) { this.blobWriter = blobWriter; return this; } public DefaultSchedulerBuilder setJobManagerJobMetricGroup(final JobManagerJobMetricGroup jobManagerJobMetricGroup) { this.jobManagerJobMetricGroup = jobManagerJobMetricGroup; return this; } public DefaultSchedulerBuilder setShuffleMaster(final ShuffleMaster<?> shuffleMaster) { this.shuffleMaster = shuffleMaster; return this; } public DefaultSchedulerBuilder setPartitionTracker(final JobMasterPartitionTracker partitionTracker) { this.partitionTracker = partitionTracker; return this; } public DefaultSchedulerBuilder setSchedulingStrategyFactory(final SchedulingStrategyFactory schedulingStrategyFactory) { this.schedulingStrategyFactory = 
schedulingStrategyFactory; return this; } public DefaultSchedulerBuilder setFailoverStrategyFactory(final FailoverStrategy.Factory failoverStrategyFactory) { this.failoverStrategyFactory = failoverStrategyFactory; return this; } public DefaultSchedulerBuilder setRestartBackoffTimeStrategy(final RestartBackoffTimeStrategy restartBackoffTimeStrategy) { this.restartBackoffTimeStrategy = restartBackoffTimeStrategy; return this; } public DefaultSchedulerBuilder setExecutionVertexOperations(final ExecutionVertexOperations executionVertexOperations) { this.executionVertexOperations = executionVertexOperations; return this; } public DefaultSchedulerBuilder setExecutionVertexVersioner(final ExecutionVertexVersioner executionVertexVersioner) { this.executionVertexVersioner = executionVertexVersioner; return this; } public DefaultSchedulerBuilder setExecutionSlotAllocatorFactory(final ExecutionSlotAllocatorFactory executionSlotAllocatorFactory) { this.executionSlotAllocatorFactory = executionSlotAllocatorFactory; return this; } }
class DefaultSchedulerBuilder { private final JobGraph jobGraph; private SchedulingStrategyFactory schedulingStrategyFactory; private Logger log = LOG; private BackPressureStatsTracker backPressureStatsTracker = VoidBackPressureStatsTracker.INSTANCE; private Executor ioExecutor = TestingUtils.defaultExecutor(); private Configuration jobMasterConfiguration = new Configuration(); private ScheduledExecutorService futureExecutor = TestingUtils.defaultExecutor(); private ScheduledExecutor delayExecutor = new ScheduledExecutorServiceAdapter(futureExecutor); private ClassLoader userCodeLoader = ClassLoader.getSystemClassLoader(); private CheckpointRecoveryFactory checkpointRecoveryFactory = new StandaloneCheckpointRecoveryFactory(); private Time rpcTimeout = DEFAULT_TIMEOUT; private BlobWriter blobWriter = VoidBlobWriter.getInstance(); private JobManagerJobMetricGroup jobManagerJobMetricGroup = UnregisteredMetricGroups.createUnregisteredJobManagerJobMetricGroup(); private ShuffleMaster<?> shuffleMaster = NettyShuffleMaster.INSTANCE; private JobMasterPartitionTracker partitionTracker = NoOpJobMasterPartitionTracker.INSTANCE; private FailoverStrategy.Factory failoverStrategyFactory = new RestartPipelinedRegionFailoverStrategy.Factory(); private RestartBackoffTimeStrategy restartBackoffTimeStrategy = NoRestartBackoffTimeStrategy.INSTANCE; private ExecutionVertexOperations executionVertexOperations = new DefaultExecutionVertexOperations(); private ExecutionVertexVersioner executionVertexVersioner = new ExecutionVertexVersioner(); private ExecutionSlotAllocatorFactory executionSlotAllocatorFactory = new TestExecutionSlotAllocatorFactory(); private DefaultSchedulerBuilder(final JobGraph jobGraph) { this.jobGraph = jobGraph; this.schedulingStrategyFactory = DefaultSchedulerFactory.createSchedulingStrategyFactory(jobGraph.getScheduleMode()); } public DefaultSchedulerBuilder setLogger(final Logger log) { this.log = log; return this; } public DefaultSchedulerBuilder 
setBackPressureStatsTracker(final BackPressureStatsTracker backPressureStatsTracker) { this.backPressureStatsTracker = backPressureStatsTracker; return this; } public DefaultSchedulerBuilder setIoExecutor(final Executor ioExecutor) { this.ioExecutor = ioExecutor; return this; } public DefaultSchedulerBuilder setJobMasterConfiguration(final Configuration jobMasterConfiguration) { this.jobMasterConfiguration = jobMasterConfiguration; return this; } public DefaultSchedulerBuilder setFutureExecutor(final ScheduledExecutorService futureExecutor) { this.futureExecutor = futureExecutor; return this; } public DefaultSchedulerBuilder setDelayExecutor(final ScheduledExecutor delayExecutor) { this.delayExecutor = delayExecutor; return this; } public DefaultSchedulerBuilder setUserCodeLoader(final ClassLoader userCodeLoader) { this.userCodeLoader = userCodeLoader; return this; } public DefaultSchedulerBuilder setCheckpointRecoveryFactory(final CheckpointRecoveryFactory checkpointRecoveryFactory) { this.checkpointRecoveryFactory = checkpointRecoveryFactory; return this; } public DefaultSchedulerBuilder setRpcTimeout(final Time rpcTimeout) { this.rpcTimeout = rpcTimeout; return this; } public DefaultSchedulerBuilder setBlobWriter(final BlobWriter blobWriter) { this.blobWriter = blobWriter; return this; } public DefaultSchedulerBuilder setJobManagerJobMetricGroup(final JobManagerJobMetricGroup jobManagerJobMetricGroup) { this.jobManagerJobMetricGroup = jobManagerJobMetricGroup; return this; } public DefaultSchedulerBuilder setShuffleMaster(final ShuffleMaster<?> shuffleMaster) { this.shuffleMaster = shuffleMaster; return this; } public DefaultSchedulerBuilder setPartitionTracker(final JobMasterPartitionTracker partitionTracker) { this.partitionTracker = partitionTracker; return this; } public DefaultSchedulerBuilder setSchedulingStrategyFactory(final SchedulingStrategyFactory schedulingStrategyFactory) { this.schedulingStrategyFactory = schedulingStrategyFactory; return this; } 
public DefaultSchedulerBuilder setFailoverStrategyFactory(final FailoverStrategy.Factory failoverStrategyFactory) { this.failoverStrategyFactory = failoverStrategyFactory; return this; } public DefaultSchedulerBuilder setRestartBackoffTimeStrategy(final RestartBackoffTimeStrategy restartBackoffTimeStrategy) { this.restartBackoffTimeStrategy = restartBackoffTimeStrategy; return this; } public DefaultSchedulerBuilder setExecutionVertexOperations(final ExecutionVertexOperations executionVertexOperations) { this.executionVertexOperations = executionVertexOperations; return this; } public DefaultSchedulerBuilder setExecutionVertexVersioner(final ExecutionVertexVersioner executionVertexVersioner) { this.executionVertexVersioner = executionVertexVersioner; return this; } public DefaultSchedulerBuilder setExecutionSlotAllocatorFactory(final ExecutionSlotAllocatorFactory executionSlotAllocatorFactory) { this.executionSlotAllocatorFactory = executionSlotAllocatorFactory; return this; } }
Shall we move this to a constant?
/**
 * Removes the entry mapped to key {@code k} from map {@code m} if present and returns the
 * removed value; validates first that the operation is legal for the map/record shape.
 */
public static Object removeIfHasKey(Strand strand, MapValue<?, ?> m, String k) {
    final String operationName = "removeIfHasKey()";
    // Reject the call up front for types/fields the operation is not defined on.
    checkIsMapOnlyOperation(m.getType(), operationName);
    checkValidFieldForRecord(m, k, operationName);
    try {
        return m.remove(k);
    } catch (org.ballerinalang.jvm.util.exceptions.BLangFreezeException freezeError) {
        // Frozen (immutable) values reject mutation; surface it as a Ballerina error.
        throw BallerinaErrors.createError(freezeError.getMessage(),
                "Failed to remove element from map: " + freezeError.getDetail());
    }
}
String op = "removeIfHasKey()";
/**
 * Removes the entry mapped to key {@code k} from map {@code m} if present and returns the
 * removed value; validates first that the operation is legal for the map/record shape.
 */
public static Object removeIfHasKey(Strand strand, MapValue<?, ?> m, String k) {
    final String operationName = Constants.REMOVE_IF_HAS_KEY;
    // Reject the call up front for types/fields the operation is not defined on.
    checkIsMapOnlyOperation(m.getType(), operationName);
    checkValidFieldForRecord(m, k, operationName);
    try {
        return m.remove(k);
    } catch (org.ballerinalang.jvm.util.exceptions.BLangFreezeException freezeError) {
        // Frozen (immutable) values reject mutation; surface it as a Ballerina error.
        throw BallerinaErrors.createError(freezeError.getMessage(),
                "Failed to remove element from map: " + freezeError.getDetail());
    }
}
// Intentionally empty holder class for the removeIfHasKey extern function.
// NOTE(review): presumably a native-function binding container — confirm against the module registry.
class RemoveIfHasKey { }
// Intentionally empty holder class for the removeIfHasKey extern function.
// NOTE(review): presumably a native-function binding container — confirm against the module registry.
class RemoveIfHasKey { }
Hmm, I wonder how we are going to handle parallel artifact upload streams in the future. It's fine to leave as a TODO for now; I suspect it'd require some small API changes.
/**
 * Handles one message of the artifact upload stream: the first message carries the artifact
 * metadata (and opens the destination channel), subsequent messages carry data chunks.
 *
 * <p>Any failure is reported to the client through {@code outboundObserver.onError}.
 */
public void onNext(PutArtifactRequest putArtifactRequest) {
    if (metadata == null) {
        // First message on the stream: must carry the metadata.
        java.util.Objects.requireNonNull(putArtifactRequest, "putArtifactRequest");
        java.util.Objects.requireNonNull(putArtifactRequest.getMetadata(), "metadata");
        metadata = putArtifactRequest.getMetadata();
        try {
            ResourceId artifactsDirId = getArtifactDirResourceId(
                putArtifactRequest.getMetadata().getStagingSessionToken());
            artifactId = artifactsDirId
                .resolve(encodedFileName(metadata.getMetadata()), StandardResolveOptions.RESOLVE_FILE);
            // Log the fully resolved destination file, not just the directory.
            LOG.info("Going to stage artifact {} to {}.", metadata.getMetadata().getName(), artifactId);
            artifactWritableByteChannel = FileSystems.create(artifactId, MimeTypes.BINARY);
        } catch (Exception e) {
            // Catch broadly: resolve()/encodedFileName() can throw unchecked exceptions, and the
            // client must always be failed via onError rather than left hanging.
            LOG.error("Staging failed for artifact {} for staging token {}",
                encodedFileName(metadata.getMetadata()), metadata.getStagingSessionToken());
            outboundObserver.onError(e);
        }
    } else {
        // Subsequent messages: append the data chunk to the already-open channel.
        try {
            artifactWritableByteChannel
                .write(putArtifactRequest.getData().getData().asReadOnlyByteBuffer());
        } catch (IOException e) {
            LOG.error("Staging failed for artifact {} to file {}.",
                metadata.getMetadata().getName(), artifactId);
            outboundObserver.onError(e);
        }
    }
}
/**
 * Handles one message of the artifact upload stream: the first message carries the artifact
 * metadata (and opens the destination channel), subsequent messages carry data chunks.
 *
 * <p>Failures are reported to the client through {@code outboundObserver.onError}.
 */
public void onNext(PutArtifactRequest putArtifactRequest) {
    if (metadata == null) {
        // First message on the stream: must carry the metadata.
        checkNotNull(putArtifactRequest);
        checkNotNull(putArtifactRequest.getMetadata());
        metadata = putArtifactRequest.getMetadata();
        try {
            ResourceId artifactsDirId = getArtifactDirResourceId(
                putArtifactRequest.getMetadata().getStagingSessionToken());
            artifactId = artifactsDirId.resolve(encodedFileName(metadata.getMetadata()),
                StandardResolveOptions.RESOLVE_FILE);
            LOG.info("Going to stage artifact {} to {}.", metadata.getMetadata().getName(), artifactId);
            artifactWritableByteChannel = FileSystems.create(artifactId, MimeTypes.BINARY);
        } catch (Exception e) {
            // Broad catch so unchecked failures are still routed to the client via onError.
            LOG.error("Staging failed for artifact {} for staging token {}",
                encodedFileName(metadata.getMetadata()), metadata.getStagingSessionToken());
            outboundObserver.onError(e);
        }
    } else {
        // Subsequent messages: append the data chunk to the already-open channel.
        try {
            artifactWritableByteChannel
                .write(putArtifactRequest.getData().getData().asReadOnlyByteBuffer());
        } catch (IOException e) {
            LOG.error("Staging failed for artifact {} to file {}.",
                metadata.getMetadata().getName(), artifactId);
            outboundObserver.onError(e);
        }
    }
}
class PutArtifactStreamObserver implements StreamObserver<PutArtifactRequest> { private final StreamObserver<PutArtifactResponse> outboundObserver; private PutArtifactMetadata metadata; private ResourceId artifactId; private WritableByteChannel artifactWritableByteChannel; PutArtifactStreamObserver(StreamObserver<PutArtifactResponse> outboundObserver) { this.outboundObserver = outboundObserver; } @Override @Override public void onError(Throwable throwable) { LOG.error("Staging artifact failed for " + artifactId, throwable); try { if (artifactWritableByteChannel != null) { artifactWritableByteChannel.close(); } if (artifactId != null) { FileSystems.delete(Collections.singletonList(artifactId), StandardMoveOptions.IGNORE_MISSING_FILES); } } catch (IOException e) { LOG.error("Unable to save artifact {}", artifactId); outboundObserver.onError(e); return; } outboundObserver.onCompleted(); } @Override public void onCompleted() { LOG.info("Staging artifact completed for " + artifactId); if (artifactWritableByteChannel != null) { try { artifactWritableByteChannel.close(); } catch (IOException e) { onError(e); return; } } outboundObserver.onCompleted(); } }
class PutArtifactStreamObserver implements StreamObserver<PutArtifactRequest> { private final StreamObserver<PutArtifactResponse> outboundObserver; private PutArtifactMetadata metadata; private ResourceId artifactId; private WritableByteChannel artifactWritableByteChannel; PutArtifactStreamObserver(StreamObserver<PutArtifactResponse> outboundObserver) { this.outboundObserver = outboundObserver; } @Override @Override public void onError(Throwable throwable) { LOG.error("Staging artifact failed for " + artifactId, throwable); try { if (artifactWritableByteChannel != null) { artifactWritableByteChannel.close(); } if (artifactId != null) { FileSystems.delete(Collections.singletonList(artifactId), StandardMoveOptions.IGNORE_MISSING_FILES); } } catch (IOException e) { LOG.error("Unable to save artifact {}", artifactId); outboundObserver.onError(e); return; } outboundObserver.onCompleted(); } @Override public void onCompleted() { LOG.info("Staging artifact completed for " + artifactId); if (artifactWritableByteChannel != null) { try { artifactWritableByteChannel.close(); } catch (IOException e) { onError(e); return; } } outboundObserver.onCompleted(); } }
If we're using this private method in `testRead` too, it should go below it to be more readable. Could you move this?
/**
 * Runs the synthetic-source -> BigQuery write pipeline, timing the write stage and
 * publishing the measured time under {@code WRITE_TIME_METRIC_NAME}.
 */
private void testWrite() {
    Pipeline pipeline = Pipeline.create(options);
    // writeMethod holds the textual name of a BigQueryIO.Write.Method enum constant.
    BigQueryIO.Write.Method method = BigQueryIO.Write.Method.valueOf(writeMethod);
    BigQueryIO.Write<byte[]> writeTransform =
        BigQueryIO.<byte[]>write()
            .to(tableQualifier)
            // Each record becomes a single-column row; TableRow.set returns the row itself.
            .withFormatFunction(input -> new TableRow().set("data", input))
            .withCustomGcsTempLocation(ValueProvider.StaticValueProvider.of(tempRoot))
            .withMethod(method)
            .withSchema(
                new TableSchema()
                    .setFields(
                        Collections.singletonList(
                            new TableFieldSchema().setName("data").setType("BYTES"))));
    pipeline
        .apply("Read from source", Read.from(new SyntheticBoundedSource(sourceOptions)))
        .apply("Gather time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME)))
        .apply("Map records", ParDo.of(new MapKVToV()))
        .apply("Write to BQ", writeTransform);
    PipelineResult result = pipeline.run();
    result.waitUntilFinish();
    extractAndPublishTime(result, WRITE_TIME_METRIC_NAME);
}
.apply("Read from source", Read.from(new SyntheticBoundedSource(sourceOptions)))
/**
 * Runs the synthetic-source -> BigQuery write pipeline, timing the write stage and
 * publishing the measured time under {@code WRITE_TIME_METRIC_NAME}.
 */
private void testWrite() {
    Pipeline pipeline = Pipeline.create(options);
    // The option holds the textual name of a BigQueryIO.Write.Method enum constant.
    BigQueryIO.Write.Method method = BigQueryIO.Write.Method.valueOf(options.getWriteMethod());
    pipeline
        .apply("Read from source", Read.from(new SyntheticBoundedSource(sourceOptions)))
        .apply("Gather time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME)))
        .apply("Map records", ParDo.of(new MapKVToV()))
        .apply(
            "Write to BQ",
            BigQueryIO.<byte[]>write()
                .to(tableQualifier)
                // Each record becomes a single-column row.
                .withFormatFunction(
                    input -> {
                        TableRow tableRow = new TableRow();
                        tableRow.set("data", input);
                        return tableRow;
                    })
                .withCustomGcsTempLocation(ValueProvider.StaticValueProvider.of(tempRoot))
                .withMethod(method)
                .withSchema(
                    new TableSchema()
                        .setFields(
                            Collections.singletonList(
                                new TableFieldSchema().setName("data").setType("BYTES")))));
    PipelineResult pipelineResult = pipeline.run();
    pipelineResult.waitUntilFinish();
    extractAndPublishTime(pipelineResult, WRITE_TIME_METRIC_NAME);
}
class BigQueryIOIT { private static final String NAMESPACE = BigQueryIOIT.class.getName(); private static String metricsBigQueryTable; private static String metricsBigQueryDataset; private static String testBigQueryDataset; private static String testBigQueryTable; private static SyntheticSourceOptions sourceOptions; private static String tableQualifier; private static String tempRoot; private static String writeMethod; private static BigQueryPerfTestOptions options; private static final String TEST_ID = UUID.randomUUID().toString(); private static final String TEST_TIMESTAMP = Timestamp.now().toString(); private static final String READ_TIME_METRIC_NAME = "read_time"; private static final String WRITE_TIME_METRIC_NAME = "write_time"; /** Options for this io performance test. */ public interface BigQueryPerfTestOptions extends IOTestPipelineOptions { @Description("Synthetic source options") @Validation.Required String getSourceOptions(); void setSourceOptions(String value); @Description("BQ dataset for the test data") String getTestBigQueryDataset(); void setTestBigQueryDataset(String dataset); @Description("BQ table for test data") String getTestBigQueryTable(); void setTestBigQueryTable(String table); @Description("BQ dataset for the metrics data") String getMetricsBigQueryDataset(); void setMetricsBigQueryDataset(String dataset); @Description("BQ table for metrics data") String getMetricsBigQueryTable(); void setMetricsBigQueryTable(String table); @Description("Should test use streaming writes or batch loads to BQ") String getWriteMethod(); void setWriteMethod(Boolean value); } @BeforeClass public static void setup() throws IOException { options = IOITHelper.readIOTestPipelineOptions(BigQueryPerfTestOptions.class); tempRoot = options.getTempRoot(); sourceOptions = SyntheticOptions.fromJsonString(options.getSourceOptions(), SyntheticSourceOptions.class); metricsBigQueryDataset = options.getMetricsBigQueryDataset(); metricsBigQueryTable = 
options.getMetricsBigQueryTable(); testBigQueryDataset = options.getTestBigQueryDataset(); testBigQueryTable = options.getTestBigQueryTable(); writeMethod = options.getWriteMethod(); BigQueryOptions bigQueryOptions = BigQueryOptions.newBuilder().build(); tableQualifier = String.format( "%s:%s.%s", bigQueryOptions.getProjectId(), testBigQueryDataset, testBigQueryTable); } @AfterClass public static void tearDown() { BigQueryOptions options = BigQueryOptions.newBuilder().build(); BigQuery client = options.getService(); TableId tableId = TableId.of(options.getProjectId(), testBigQueryDataset, testBigQueryTable); client.delete(tableId); } @Test public void testWriteThenRead() { testWrite(); testRead(); } private void extractAndPublishTime(PipelineResult pipelineResult, String writeTimeMetricName) { NamedTestResult metricResult = getMetricSupplier(writeTimeMetricName).apply(new MetricsReader(pipelineResult, NAMESPACE)); IOITMetrics.publish( TEST_ID, TEST_TIMESTAMP, metricsBigQueryDataset, metricsBigQueryTable, Collections.singletonList(metricResult)); } private void testRead() { Pipeline pipeline = Pipeline.create(options); pipeline .apply("Read from BQ", BigQueryIO.readTableRows().from(tableQualifier)) .apply("Gather time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME))); PipelineResult result = pipeline.run(); result.waitUntilFinish(); extractAndPublishTime(result, READ_TIME_METRIC_NAME); } private static Function<MetricsReader, NamedTestResult> getMetricSupplier(String metricName) { return reader -> { long startTime = reader.getStartTimeMetric(metricName); long endTime = reader.getEndTimeMetric(metricName); return NamedTestResult.create( TEST_ID, TEST_TIMESTAMP, metricName, (endTime - startTime) / 1e3); }; } private static class MapKVToV extends DoFn<KV<byte[], byte[]>, byte[]> { @ProcessElement public void process(ProcessContext context) { context.output(context.element().getValue()); } } }
class BigQueryIOIT { private static final String NAMESPACE = BigQueryIOIT.class.getName(); private static final String TEST_ID = UUID.randomUUID().toString(); private static final String TEST_TIMESTAMP = Timestamp.now().toString(); private static final String READ_TIME_METRIC_NAME = "read_time"; private static final String WRITE_TIME_METRIC_NAME = "write_time"; private static String metricsBigQueryTable; private static String metricsBigQueryDataset; private static String testBigQueryDataset; private static String testBigQueryTable; private static SyntheticSourceOptions sourceOptions; private static String tableQualifier; private static String tempRoot; private static BigQueryPerfTestOptions options; @BeforeClass public static void setup() throws IOException { options = IOITHelper.readIOTestPipelineOptions(BigQueryPerfTestOptions.class); tempRoot = options.getTempRoot(); sourceOptions = SyntheticOptions.fromJsonString(options.getSourceOptions(), SyntheticSourceOptions.class); metricsBigQueryDataset = options.getMetricsBigQueryDataset(); metricsBigQueryTable = options.getMetricsBigQueryTable(); testBigQueryDataset = options.getTestBigQueryDataset(); testBigQueryTable = options.getTestBigQueryTable(); BigQueryOptions bigQueryOptions = BigQueryOptions.newBuilder().build(); tableQualifier = String.format( "%s:%s.%s", bigQueryOptions.getProjectId(), testBigQueryDataset, testBigQueryTable); } @AfterClass public static void tearDown() { BigQueryOptions options = BigQueryOptions.newBuilder().build(); BigQuery client = options.getService(); TableId tableId = TableId.of(options.getProjectId(), testBigQueryDataset, testBigQueryTable); client.delete(tableId); } @Test public void testWriteThenRead() { testWrite(); testRead(); } private void testRead() { Pipeline pipeline = Pipeline.create(options); pipeline .apply("Read from BQ", BigQueryIO.readTableRows().from(tableQualifier)) .apply("Gather time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME))); PipelineResult 
result = pipeline.run(); result.waitUntilFinish(); extractAndPublishTime(result, READ_TIME_METRIC_NAME); } private void extractAndPublishTime(PipelineResult pipelineResult, String writeTimeMetricName) { NamedTestResult metricResult = getMetricSupplier(writeTimeMetricName).apply(new MetricsReader(pipelineResult, NAMESPACE)); IOITMetrics.publish( TEST_ID, TEST_TIMESTAMP, metricsBigQueryDataset, metricsBigQueryTable, Collections.singletonList(metricResult)); } private static Function<MetricsReader, NamedTestResult> getMetricSupplier(String metricName) { return reader -> { long startTime = reader.getStartTimeMetric(metricName); long endTime = reader.getEndTimeMetric(metricName); return NamedTestResult.create( TEST_ID, TEST_TIMESTAMP, metricName, (endTime - startTime) / 1e3); }; } /** Options for this io performance test. */ public interface BigQueryPerfTestOptions extends IOTestPipelineOptions { @Description("Synthetic source options") @Validation.Required String getSourceOptions(); void setSourceOptions(String value); @Description("BQ dataset for the test data") String getTestBigQueryDataset(); void setTestBigQueryDataset(String dataset); @Description("BQ table for test data") String getTestBigQueryTable(); void setTestBigQueryTable(String table); @Description("BQ dataset for the metrics data") String getMetricsBigQueryDataset(); void setMetricsBigQueryDataset(String dataset); @Description("BQ table for metrics data") String getMetricsBigQueryTable(); void setMetricsBigQueryTable(String table); @Description("Should test use streaming writes or batch loads to BQ") String getWriteMethod(); void setWriteMethod(Boolean value); } private static class MapKVToV extends DoFn<KV<byte[], byte[]>, byte[]> { @ProcessElement public void process(ProcessContext context) { context.output(context.element().getValue()); } } }
This logic won't be required anymore.
/**
 * Switches to the reader of the source at {@code index}: closes the current reader (if any),
 * creates and starts the new one, refreshes the availability helper, and hands over any
 * restored splits that belong to the new source.
 *
 * <p>Fix: corrected the typo in the reader-creation error message ("Failed tp create").
 */
private void setCurrentReader(int index) {
    Preconditions.checkArgument(index != currentSourceIndex);
    if (currentReader != null) {
        try {
            currentReader.close();
        } catch (Exception e) {
            throw new RuntimeException("Failed to close current reader", e);
        }
        LOG.debug(
            "Reader closed: subtask={} sourceIndex={} currentReader={}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader);
    }
    Source source = switchedSources.sourceOf(index);
    SourceReader<T, ?> reader;
    try {
        reader = source.createReader(readerContext);
    } catch (Exception e) {
        throw new RuntimeException("Failed to create reader", e);
    }
    reader.start();
    currentSourceIndex = index;
    currentReader = reader;
    // Wake up anyone blocked on isAvailable() and re-arm the helper for the new reader.
    completeAndResetAvailabilityHelper();
    LOG.debug(
        "Reader started: subtask={} sourceIndex={} {}",
        readerContext.getIndexOfSubtask(),
        currentSourceIndex,
        reader);
    if (!restoredSplits.isEmpty()) {
        // Hand over restored splits that belong to the newly activated source.
        List<HybridSourceSplit> splits = new ArrayList<>(restoredSplits.size());
        Iterator<HybridSourceSplit> it = restoredSplits.iterator();
        while (it.hasNext()) {
            HybridSourceSplit hybridSplit = it.next();
            if (hybridSplit.sourceIndex() == index) {
                splits.add(hybridSplit);
                it.remove();
            }
        }
        addSplits(splits);
    }
}
completeAndResetAvailabilityHelper();
/**
 * Switches to the reader of the source at {@code index}: closes the current reader (if any),
 * creates and starts the new one, completes the pending availability future, and hands over
 * any restored splits that belong to the new source.
 *
 * <p>Fix: corrected the typo in the reader-creation error message ("Failed tp create").
 */
private void setCurrentReader(int index) {
    Preconditions.checkArgument(index != currentSourceIndex);
    if (currentReader != null) {
        try {
            currentReader.close();
        } catch (Exception e) {
            throw new RuntimeException("Failed to close current reader", e);
        }
        LOG.debug(
            "Reader closed: subtask={} sourceIndex={} currentReader={}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader);
    }
    Source source = switchedSources.sourceOf(index);
    SourceReader<T, ?> reader;
    try {
        reader = source.createReader(readerContext);
    } catch (Exception e) {
        throw new RuntimeException("Failed to create reader", e);
    }
    reader.start();
    currentSourceIndex = index;
    currentReader = reader;
    // Wake up anyone blocked on isAvailable(); subsequent calls delegate to the new reader.
    availabilityFuture.complete(null);
    LOG.debug(
        "Reader started: subtask={} sourceIndex={} {}",
        readerContext.getIndexOfSubtask(),
        currentSourceIndex,
        reader);
    if (!restoredSplits.isEmpty()) {
        // Hand over restored splits that belong to the newly activated source.
        List<HybridSourceSplit> splits = new ArrayList<>(restoredSplits.size());
        Iterator<HybridSourceSplit> it = restoredSplits.iterator();
        while (it.hasNext()) {
            HybridSourceSplit hybridSplit = it.next();
            if (hybridSplit.sourceIndex() == index) {
                splits.add(hybridSplit);
                it.remove();
            }
        }
        addSplits(splits);
    }
}
/**
 * Reader that delegates to the reader of the currently active underlying source of a hybrid
 * source, switching readers when it receives a {@code SwitchSourceEvent}.
 *
 * <p>Availability is tracked through a {@code MultipleFuturesAvailabilityHelper} that is
 * re-created on each source switch (size 0 while no reader is active, size 1 otherwise).
 */
class HybridSourceReader<T> implements SourceReader<T, HybridSourceSplit> {
    private static final Logger LOG = LoggerFactory.getLogger(HybridSourceReader.class);
    private final SourceReaderContext readerContext;
    private final SwitchedSources switchedSources = new SwitchedSources();
    // -1 until the first SwitchSourceEvent activates a source.
    private int currentSourceIndex = -1;
    private boolean isFinalSource;
    private SourceReader<T, ? extends SourceSplit> currentReader;
    // Splits restored from state before any reader is active; drained on switch.
    private List<HybridSourceSplit> restoredSplits = new ArrayList<>();
    private MultipleFuturesAvailabilityHelper availabilityHelper = new MultipleFuturesAvailabilityHelper(0);

    public HybridSourceReader(SourceReaderContext readerContext) {
        this.readerContext = readerContext;
    }

    @Override
    public void start() {
        // Tell the coordinator which source this subtask last finished, so it can send the
        // matching SwitchSourceEvent; restored splits imply the preceding source index.
        int initialSourceIndex = currentSourceIndex;
        if (!restoredSplits.isEmpty()) {
            initialSourceIndex = restoredSplits.get(0).sourceIndex() - 1;
        }
        readerContext.sendSourceEventToCoordinator(
            new SourceReaderFinishedEvent(initialSourceIndex));
    }

    @Override
    public InputStatus pollNext(ReaderOutput output) throws Exception {
        if (currentReader == null) {
            // No active reader yet (waiting for the coordinator's switch event).
            return InputStatus.NOTHING_AVAILABLE;
        }
        InputStatus status = currentReader.pollNext(output);
        if (status == InputStatus.END_OF_INPUT) {
            LOG.info(
                "End of input subtask={} sourceIndex={} {}",
                readerContext.getIndexOfSubtask(),
                currentSourceIndex,
                currentReader);
            readerContext.sendSourceEventToCoordinator(
                new SourceReaderFinishedEvent(currentSourceIndex));
            if (!isFinalSource) {
                // More sources will follow; mask END_OF_INPUT until the final one finishes.
                return InputStatus.NOTHING_AVAILABLE;
            }
        }
        return status;
    }

    @Override
    public List<HybridSourceSplit> snapshotState(long checkpointId) {
        // Wrap the delegate's splits with the current source index for restore routing.
        List<? extends SourceSplit> state = currentReader != null ? currentReader.snapshotState(checkpointId) : Collections.emptyList();
        return HybridSourceSplit.wrapSplits(state, currentSourceIndex, switchedSources);
    }

    @Override
    public void notifyCheckpointComplete(long checkpointId) throws Exception {
        if (currentReader != null) {
            currentReader.notifyCheckpointComplete(checkpointId);
        }
    }

    @Override
    public void notifyCheckpointAborted(long checkpointId) throws Exception {
        if (currentReader != null) {
            currentReader.notifyCheckpointAborted(checkpointId);
        }
    }

    @Override
    public CompletableFuture<Void> isAvailable() {
        // Re-arm before delegating so stale completions are not observed.
        availabilityHelper.resetToUnAvailable();
        if (currentReader == null) {
            return (CompletableFuture<Void>) availabilityHelper.getAvailableFuture();
        } else {
            Preconditions.checkArgument(
                availabilityHelper.getSize() == 1,
                "Availability helper is out of sync for current reader: %s",
                currentReader);
            availabilityHelper.anyOf(0, currentReader.isAvailable());
            return (CompletableFuture<Void>) availabilityHelper.getAvailableFuture();
        }
    }

    @Override
    public void addSplits(List<HybridSourceSplit> splits) {
        LOG.info(
            "Adding splits subtask={} sourceIndex={} currentReader={} {}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader,
            splits);
        if (currentSourceIndex < 0) {
            // No reader active yet — buffer until the matching source is switched in.
            restoredSplits.addAll(splits);
        } else {
            List<SourceSplit> realSplits = new ArrayList<>(splits.size());
            for (HybridSourceSplit split : splits) {
                Preconditions.checkState(
                    split.sourceIndex() == currentSourceIndex,
                    "Split %s while current source is %s",
                    split,
                    currentSourceIndex);
                realSplits.add(HybridSourceSplit.unwrapSplit(split, switchedSources));
            }
            currentReader.addSplits((List) realSplits);
        }
    }

    @Override
    public void notifyNoMoreSplits() {
        if (currentReader != null) {
            currentReader.notifyNoMoreSplits();
        }
        LOG.debug(
            "No more splits for subtask={} sourceIndex={} currentReader={}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader);
    }

    @Override
    public void handleSourceEvents(SourceEvent sourceEvent) {
        if (sourceEvent instanceof SwitchSourceEvent) {
            // Coordinator instructs a switch to the next underlying source.
            SwitchSourceEvent sse = (SwitchSourceEvent) sourceEvent;
            LOG.info(
                "Switch source event: subtask={} sourceIndex={} source={}",
                readerContext.getIndexOfSubtask(),
                sse.sourceIndex(),
                sse.source());
            switchedSources.put(sse.sourceIndex(), sse.source());
            setCurrentReader(sse.sourceIndex());
            isFinalSource = sse.isFinalSource();
        } else {
            currentReader.handleSourceEvents(sourceEvent);
        }
    }

    @Override
    public void close() throws Exception {
        if (currentReader != null) {
            currentReader.close();
        }
        LOG.debug(
            "Reader closed: subtask={} sourceIndex={} currentReader={}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader);
    }

    // Completes any pending availability future, then rebuilds the helper for the new
    // reader state (empty helper when no reader is active).
    private void completeAndResetAvailabilityHelper() {
        availabilityHelper.notifyCompletion();
        if (currentReader == null) {
            availabilityHelper = new MultipleFuturesAvailabilityHelper(0);
        } else {
            availabilityHelper = new MultipleFuturesAvailabilityHelper(1);
            availabilityHelper.anyOf(0, currentReader.isAvailable());
        }
    }

    @VisibleForTesting
    MultipleFuturesAvailabilityHelper getAvailabilityHelper() {
        return availabilityHelper;
    }
}
/**
 * Reader that delegates to the reader of the currently active underlying source of a hybrid
 * source, switching readers when it receives a {@code SwitchSourceEvent}.
 *
 * <p>Availability: {@code availabilityFuture} starts incomplete, is completed on a source
 * switch, and thereafter {@code isAvailable()} delegates to the active reader's future.
 */
class HybridSourceReader<T> implements SourceReader<T, HybridSourceSplit> {
    private static final Logger LOG = LoggerFactory.getLogger(HybridSourceReader.class);
    private final SourceReaderContext readerContext;
    private final SwitchedSources switchedSources = new SwitchedSources();
    // -1 until the first SwitchSourceEvent activates a source.
    private int currentSourceIndex = -1;
    private boolean isFinalSource;
    private SourceReader<T, ? extends SourceSplit> currentReader;
    // Incomplete until a reader becomes active; then replaced by the delegate's future.
    private CompletableFuture<Void> availabilityFuture = new CompletableFuture<>();
    // Splits restored from state before any reader is active; drained on switch.
    private List<HybridSourceSplit> restoredSplits = new ArrayList<>();

    public HybridSourceReader(SourceReaderContext readerContext) {
        this.readerContext = readerContext;
    }

    @Override
    public void start() {
        // Tell the coordinator which source this subtask last finished, so it can send the
        // matching SwitchSourceEvent; restored splits imply the preceding source index.
        int initialSourceIndex = currentSourceIndex;
        if (!restoredSplits.isEmpty()) {
            initialSourceIndex = restoredSplits.get(0).sourceIndex() - 1;
        }
        readerContext.sendSourceEventToCoordinator(
            new SourceReaderFinishedEvent(initialSourceIndex));
    }

    @Override
    public InputStatus pollNext(ReaderOutput output) throws Exception {
        if (currentReader == null) {
            // No active reader yet (waiting for the coordinator's switch event).
            return InputStatus.NOTHING_AVAILABLE;
        }
        InputStatus status = currentReader.pollNext(output);
        if (status == InputStatus.END_OF_INPUT) {
            LOG.info(
                "End of input subtask={} sourceIndex={} {}",
                readerContext.getIndexOfSubtask(),
                currentSourceIndex,
                currentReader);
            readerContext.sendSourceEventToCoordinator(
                new SourceReaderFinishedEvent(currentSourceIndex));
            if (!isFinalSource) {
                // More sources will follow; mask END_OF_INPUT until the final one finishes.
                return InputStatus.NOTHING_AVAILABLE;
            }
        }
        return status;
    }

    @Override
    public List<HybridSourceSplit> snapshotState(long checkpointId) {
        // Wrap the delegate's splits with the current source index for restore routing.
        List<? extends SourceSplit> state = currentReader != null ? currentReader.snapshotState(checkpointId) : Collections.emptyList();
        return HybridSourceSplit.wrapSplits(state, currentSourceIndex, switchedSources);
    }

    @Override
    public void notifyCheckpointComplete(long checkpointId) throws Exception {
        if (currentReader != null) {
            currentReader.notifyCheckpointComplete(checkpointId);
        }
    }

    @Override
    public void notifyCheckpointAborted(long checkpointId) throws Exception {
        if (currentReader != null) {
            currentReader.notifyCheckpointAborted(checkpointId);
        }
    }

    @Override
    public CompletableFuture<Void> isAvailable() {
        // Once the switch future completed, availability tracks the active reader.
        // NOTE(review): assumes availabilityFuture is only done after a reader is set
        // (setCurrentReader completes it) — confirm no other completion path exists.
        if (availabilityFuture.isDone()) {
            availabilityFuture = currentReader.isAvailable();
        }
        return availabilityFuture;
    }

    @Override
    public void addSplits(List<HybridSourceSplit> splits) {
        LOG.info(
            "Adding splits subtask={} sourceIndex={} currentReader={} {}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader,
            splits);
        if (currentSourceIndex < 0) {
            // No reader active yet — buffer until the matching source is switched in.
            restoredSplits.addAll(splits);
        } else {
            List<SourceSplit> realSplits = new ArrayList<>(splits.size());
            for (HybridSourceSplit split : splits) {
                Preconditions.checkState(
                    split.sourceIndex() == currentSourceIndex,
                    "Split %s while current source is %s",
                    split,
                    currentSourceIndex);
                realSplits.add(HybridSourceSplit.unwrapSplit(split, switchedSources));
            }
            currentReader.addSplits((List) realSplits);
        }
    }

    @Override
    public void notifyNoMoreSplits() {
        if (currentReader != null) {
            currentReader.notifyNoMoreSplits();
        }
        LOG.debug(
            "No more splits for subtask={} sourceIndex={} currentReader={}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader);
    }

    @Override
    public void handleSourceEvents(SourceEvent sourceEvent) {
        if (sourceEvent instanceof SwitchSourceEvent) {
            // Coordinator instructs a switch to the next underlying source.
            SwitchSourceEvent sse = (SwitchSourceEvent) sourceEvent;
            LOG.info(
                "Switch source event: subtask={} sourceIndex={} source={}",
                readerContext.getIndexOfSubtask(),
                sse.sourceIndex(),
                sse.source());
            switchedSources.put(sse.sourceIndex(), sse.source());
            setCurrentReader(sse.sourceIndex());
            isFinalSource = sse.isFinalSource();
        } else {
            currentReader.handleSourceEvents(sourceEvent);
        }
    }

    @Override
    public void close() throws Exception {
        if (currentReader != null) {
            currentReader.close();
        }
        LOG.debug(
            "Reader closed: subtask={} sourceIndex={} currentReader={}",
            readerContext.getIndexOfSubtask(),
            currentSourceIndex,
            currentReader);
    }
}
Shall we call `analyzeNode` instead? ```suggestion conversionExpr.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env)); ```
/**
 * Analyzes a type conversion expression: first the converted expression, then any
 * annotation attachments on the conversion.
 *
 * <p>Fix: attachments are now routed through {@code analyzeNode} (instead of calling
 * {@code accept(this)} directly) so the analyzer's parent/env bookkeeping is maintained.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeExpr(conversionExpr.expr);
    conversionExpr.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env));
}
conversionExpr.annAttachments.forEach(annotationAttachment -> annotationAttachment.accept(this));
/**
 * Analyzes a type conversion expression: first the converted expression, then each
 * annotation attachment via {@code analyzeNode} so parent/env bookkeeping is preserved.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeExpr(conversionExpr.expr);
    conversionExpr.annAttachments.forEach(attachment -> analyzeNode(attachment, env));
}
class CodeAnalyzer extends BLangNodeVisitor { private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY = new CompilerContext.Key<>(); private static final String NULL_LITERAL = "null"; private final SymbolResolver symResolver; private int loopCount; private int transactionCount; private boolean statementReturns; private boolean lastStatement; private boolean withinRetryBlock; private boolean withinLockBlock; private int workerCount; private SymbolTable symTable; private Types types; private BLangDiagnosticLogHelper dlog; private TypeChecker typeChecker; private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>(); private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>(); private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>(); private BLangNode parent; private Names names; private SymbolEnv env; private final Stack<LinkedHashSet<BType>> returnTypes = new Stack<>(); private boolean withinAbortedBlock; private boolean withinCommittedBlock; private boolean isJSONContext; private boolean enableExperimentalFeatures; public static CodeAnalyzer getInstance(CompilerContext context) { CodeAnalyzer codeGenerator = context.get(CODE_ANALYZER_KEY); if (codeGenerator == null) { codeGenerator = new CodeAnalyzer(context); } return codeGenerator; } public CodeAnalyzer(CompilerContext context) { context.put(CODE_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLogHelper.getInstance(context); this.typeChecker = TypeChecker.getInstance(context); this.names = Names.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.enableExperimentalFeatures = Boolean.parseBoolean( CompilerOptions.getInstance(context).get(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED)); } private void resetFunction() { this.resetStatementReturns(); } private void 
resetStatementReturns() { this.statementReturns = false; } private void resetLastStatement() { this.lastStatement = false; } public BLangPackage analyze(BLangPackage pkgNode) { pkgNode.accept(this); return pkgNode; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) { return; } parent = pkgNode; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol); analyzeTopLevelNodes(pkgNode, pkgEnv); pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage)); } private void analyzeTopLevelNodes(BLangPackage pkgNode, SymbolEnv pkgEnv) { pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv)); pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE); parent = null; } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; BLangNode myParent = parent; node.parent = parent; parent = node; node.accept(this); parent = myParent; this.env = prevEnv; } private void analyzeTypeNode(BLangType node, SymbolEnv env) { if (node == null) { return; } analyzeNode(node, env); } @Override public void visit(BLangCompilationUnit compUnitNode) { compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env)); } public void visit(BLangTypeDefinition typeDefinition) { analyzeTypeNode(typeDefinition.typeNode, this.env); typeDefinition.annAttachments.forEach(annotationAttachment -> annotationAttachment.accept(this)); } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { analyzeNode(bLangTupleVariableDef.var, this.env); } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { analyzeNode(bLangRecordVariableDef.var, this.env); } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { analyzeNode(bLangErrorVariableDef.errorVariable, this.env); } @Override public void visit(BLangFunction funcNode) { boolean isLambda = 
funcNode.flagSet.contains(Flag.LAMBDA); if (isLambda) { return; } validateParams(funcNode); if (Symbols.isPublic(funcNode.symbol)) { funcNode.symbol.params.forEach(symbol -> analyzeExportableTypeRef(funcNode.symbol, symbol.type.tsymbol, true, funcNode.pos)); if (funcNode.symbol.restParam != null) { analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.restParam.type.tsymbol, true, funcNode.restParam.pos); } analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.retType.tsymbol, true, funcNode.returnTypeNode.pos); } this.validateMainFunction(funcNode); this.validateModuleInitFunction(funcNode); try { this.initNewWorkerActionSystem(); this.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME, funcNode.pos, funcNode); this.visitFunction(funcNode); this.workerActionSystemStack.peek().endWorkerActionStateMachine(); } finally { this.finalizeCurrentWorkerActionSystem(); } funcNode.annAttachments.forEach(annotationAttachment -> annotationAttachment.accept(this)); } private void validateParams(BLangFunction funcNode) { funcNode.requiredParams.forEach(param -> analyzeNode(param, env)); if (funcNode.restParam != null) { analyzeNode(funcNode.restParam, env); } } private void visitFunction(BLangFunction funcNode) { SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); this.returnWithintransactionCheckStack.push(true); this.doneWithintransactionCheckStack.push(true); this.returnTypes.push(new LinkedHashSet<>()); this.resetFunction(); if (Symbols.isNative(funcNode.symbol)) { return; } if (isPublicInvokableNode(funcNode)) { analyzeNode(funcNode.returnTypeNode, invokableEnv); } /* the body can be null in the case of Object type function declarations */ if (funcNode.body != null) { analyzeNode(funcNode.body, invokableEnv); boolean isNilableReturn = funcNode.symbol.type.getReturnType().isNullable(); if (!isNilableReturn && !this.statementReturns) { this.dlog.error(funcNode.pos, 
DiagnosticCode.INVOKABLE_MUST_RETURN, funcNode.getKind().toString().toLowerCase()); } } this.returnTypes.pop(); this.returnWithintransactionCheckStack.pop(); this.doneWithintransactionCheckStack.pop(); } private boolean isPublicInvokableNode(BLangInvokableNode invNode) { return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) || Symbols.isPublic(invNode.symbol.owner)); } @Override public void visit(BLangBlockFunctionBody body) { final SymbolEnv blockEnv = SymbolEnv.createFuncBodyEnv(body, env); for (BLangStatement e : body.stmts) { analyzeNode(e, blockEnv); } this.resetLastStatement(); } @Override public void visit(BLangExprFunctionBody body) { analyzeExpr(body.expr); this.statementReturns = true; this.resetLastStatement(); } @Override public void visit(BLangExternalFunctionBody body) { } @Override public void visit(BLangForkJoin forkJoin) { if (forkJoin.workers.isEmpty()) { dlog.error(forkJoin.pos, DiagnosticCode.INVALID_FOR_JOIN_SYNTAX_EMPTY_FORK); } } @Override public void visit(BLangWorker worker) { /* ignore, remove later */ } @Override public void visit(BLangEndpoint endpointNode) { } @Override public void visit(BLangTransaction transactionNode) { checkExperimentalFeatureValidity(ExperimentalFeatures.TRANSACTIONS, transactionNode.pos); this.checkStatementExecutionValidity(transactionNode); if (!isValidTransactionBlock()) { this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER); return; } this.loopWithintransactionCheckStack.push(false); this.returnWithintransactionCheckStack.push(false); this.doneWithintransactionCheckStack.push(false); this.transactionCount++; if (this.transactionCount > 1) { this.dlog.error(transactionNode.pos, DiagnosticCode.NESTED_TRANSACTIONS_ARE_INVALID); } analyzeNode(transactionNode.transactionBody, env); this.transactionCount--; this.resetLastStatement(); if (transactionNode.onRetryBody != null) { this.withinRetryBlock = true; 
analyzeNode(transactionNode.onRetryBody, env); this.resetStatementReturns(); this.resetLastStatement(); this.withinRetryBlock = false; } if (transactionNode.abortedBody != null) { this.withinAbortedBlock = true; analyzeNode(transactionNode.abortedBody, env); this.resetStatementReturns(); this.resetLastStatement(); this.withinAbortedBlock = false; } if (transactionNode.committedBody != null) { this.withinCommittedBlock = true; analyzeNode(transactionNode.committedBody, env); this.resetStatementReturns(); this.resetLastStatement(); this.withinCommittedBlock = false; } this.returnWithintransactionCheckStack.pop(); this.loopWithintransactionCheckStack.pop(); this.doneWithintransactionCheckStack.pop(); analyzeExpr(transactionNode.retryCount); } @Override public void visit(BLangAbort abortNode) { if (this.transactionCount == 0) { this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK); return; } this.lastStatement = true; } @Override public void visit(BLangRetry retryNode) { if (this.transactionCount == 0) { this.dlog.error(retryNode.pos, DiagnosticCode.RETRY_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK); return; } this.lastStatement = true; } private void checkUnreachableCode(BLangStatement stmt) { if (this.statementReturns) { this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE); this.resetStatementReturns(); } if (lastStatement) { this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE); this.resetLastStatement(); } } private void checkStatementExecutionValidity(BLangStatement stmt) { this.checkUnreachableCode(stmt); } @Override public void visit(BLangBlockStmt blockNode) { final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv)); this.resetLastStatement(); } @Override public void visit(BLangReturn returnStmt) { this.checkStatementExecutionValidity(returnStmt); if (checkReturnValidityInTransaction()) { this.dlog.error(returnStmt.pos, 
DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.statementReturns = true; analyzeExpr(returnStmt.expr); this.returnTypes.peek().add(returnStmt.expr.type); } @Override public void visit(BLangIf ifStmt) { this.checkStatementExecutionValidity(ifStmt); analyzeNode(ifStmt.body, env); boolean ifStmtReturns = this.statementReturns; this.resetStatementReturns(); if (ifStmt.elseStmt != null) { analyzeNode(ifStmt.elseStmt, env); this.statementReturns = ifStmtReturns && this.statementReturns; } analyzeExpr(ifStmt.expr); } @Override public void visit(BLangMatch matchStmt) { analyzeExpr(matchStmt.expr); boolean staticLastPattern = false; if (!matchStmt.getStaticPatternClauses().isEmpty()) { staticLastPattern = analyzeStaticMatchPatterns(matchStmt); } boolean structuredLastPattern = false; if (!matchStmt.getStructuredPatternClauses().isEmpty()) { structuredLastPattern = analyzeStructuredMatchPatterns(matchStmt); } if (!matchStmt.getPatternClauses().isEmpty()) { analyzeEmptyMatchPatterns(matchStmt); analyzeMatchedPatterns(matchStmt, staticLastPattern, structuredLastPattern); } } @Override public void visit(BLangMatchStaticBindingPatternClause patternClause) { analyzeNode(patternClause.matchExpr, env); analyzeNode(patternClause.body, env); resetStatementReturns(); } @Override public void visit(BLangMatchStructuredBindingPatternClause patternClause) { analyzeNode(patternClause.matchExpr, env); analyzeNode(patternClause.body, env); resetStatementReturns(); } private void analyzeMatchedPatterns(BLangMatch matchStmt, boolean staticLastPattern, boolean structuredLastPattern) { if (staticLastPattern && structuredLastPattern) { dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CONTAINS_TWO_DEFAULT_PATTERNS); } if ((staticLastPattern && !hasErrorType(matchStmt.exprTypes)) || structuredLastPattern) { if (matchStmt.getPatternClauses().size() == 1) { dlog.error(matchStmt.getPatternClauses().get(0).pos, DiagnosticCode.MATCH_STMT_PATTERN_ALWAYS_MATCHES); } 
this.checkStatementExecutionValidity(matchStmt); boolean matchStmtReturns = true; for (BLangMatchBindingPatternClause patternClause : matchStmt.getPatternClauses()) { analyzeNode(patternClause.body, env); matchStmtReturns = matchStmtReturns && this.statementReturns; this.resetStatementReturns(); } this.statementReturns = matchStmtReturns; } } private boolean hasErrorType(List<BType> typeList) { return typeList.stream().anyMatch(t -> types.isAssignable(t, symTable.errorType)); } private boolean analyzeStructuredMatchPatterns(BLangMatch matchStmt) { if (matchStmt.exprTypes.isEmpty()) { return false; } for (BLangMatchStructuredBindingPatternClause patternClause : matchStmt.getStructuredPatternClauses()) { analyzeNode(patternClause, env); } return analyseStructuredBindingPatterns(matchStmt.getStructuredPatternClauses(), hasErrorType(matchStmt.exprTypes)); } /** * This method is used to check structured `var []`, `var {}` & static `[]`, `{}` match pattern. * * @param matchStmt the match statement containing structured & static match patterns. 
*/ private void analyzeEmptyMatchPatterns(BLangMatch matchStmt) { List<BLangMatchBindingPatternClause> emptyLists = new ArrayList<>(); List<BLangMatchBindingPatternClause> emptyRecords = new ArrayList<>(); for (BLangMatchBindingPatternClause pattern : matchStmt.patternClauses) { if (pattern.getKind() == NodeKind.MATCH_STATIC_PATTERN_CLAUSE) { BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) pattern; if (staticPattern.literal.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) { BLangListConstructorExpr listLiteral = (BLangListConstructorExpr) staticPattern.literal; if (listLiteral.exprs.isEmpty()) { emptyLists.add(pattern); } } else if (staticPattern.literal.getKind() == NodeKind.RECORD_LITERAL_EXPR) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) staticPattern.literal; if (recordLiteral.fields.isEmpty()) { emptyRecords.add(pattern); } } } else if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) { BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.TUPLE_VARIABLE) { BLangTupleVariable tupleVariable = (BLangTupleVariable) structuredPattern.bindingPatternVariable; if (tupleVariable.memberVariables.isEmpty() && tupleVariable.restVariable == null) { emptyLists.add(pattern); } } else if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.RECORD_VARIABLE) { BLangRecordVariable recordVariable = (BLangRecordVariable) structuredPattern.bindingPatternVariable; if (recordVariable.variableList.isEmpty() && recordVariable.restParam == null) { emptyRecords.add(pattern); } } } } if (emptyLists.size() > 1) { for (int i = 1; i < emptyLists.size(); i++) { dlog.error(emptyLists.get(i).pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } if (emptyRecords.size() > 1) { for (int i = 1; i < emptyRecords.size(); i++) { dlog.error(emptyRecords.get(i).pos, 
DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } } /** * This method is used to check the isLike test in a static match pattern. * @param matchStmt the match statment containing static match patterns. */ private boolean analyzeStaticMatchPatterns(BLangMatch matchStmt) { if (matchStmt.exprTypes.isEmpty()) { return false; } List<BLangMatchStaticBindingPatternClause> matchedPatterns = new ArrayList<>(); for (BLangMatchStaticBindingPatternClause pattern : matchStmt.getStaticPatternClauses()) { analyzeNode(pattern, env); List<BType> matchedExpTypes = matchStmt.exprTypes .stream() .filter(exprType -> isValidStaticMatchPattern(exprType, pattern.literal)) .collect(Collectors.toList()); if (matchedExpTypes.isEmpty()) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN); continue; } this.isJSONContext = types.isJSONContext(matchStmt.expr.type); analyzeNode(pattern.literal, env); matchedPatterns.add(pattern); } if (matchedPatterns.isEmpty()) { return false; } return analyzeStaticPatterns(matchedPatterns, hasErrorType(matchStmt.exprTypes)); } private boolean analyzeStaticPatterns(List<BLangMatchStaticBindingPatternClause> matchedPatterns, boolean errorTypeInMatchExpr) { BLangMatchStaticBindingPatternClause finalPattern = matchedPatterns.get(matchedPatterns.size() - 1); if (finalPattern.literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) finalPattern.literal).variableName.value.equals(Names.IGNORE.value) && !errorTypeInMatchExpr) { finalPattern.isLastPattern = true; } for (int i = 0; i < matchedPatterns.size() - 1; i++) { BLangExpression precedingPattern = matchedPatterns.get(i).literal; for (int j = i + 1; j < matchedPatterns.size(); j++) { BLangExpression pattern = matchedPatterns.get(j).literal; if (checkLiteralSimilarity(precedingPattern, pattern)) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); matchedPatterns.remove(j--); } } } return finalPattern.isLastPattern; } private boolean 
analyseStructuredBindingPatterns(List<BLangMatchStructuredBindingPatternClause> clauses, boolean errorTypeInMatchExpr) { BLangMatchStructuredBindingPatternClause finalPattern = clauses.get(clauses.size() - 1); if (finalPattern.bindingPatternVariable.getKind() == NodeKind.VARIABLE && finalPattern.typeGuardExpr == null && !(errorTypeInMatchExpr && isWildcardMatchPattern(finalPattern))) { finalPattern.isLastPattern = true; } BLangMatchStructuredBindingPatternClause currentPattern; BLangMatchStructuredBindingPatternClause precedingPattern; for (int i = 0; i < clauses.size(); i++) { precedingPattern = clauses.get(i); if (precedingPattern.typeGuardExpr != null) { analyzeExpr(precedingPattern.typeGuardExpr); } for (int j = i + 1; j < clauses.size(); j++) { currentPattern = clauses.get(j); BLangVariable precedingVar = precedingPattern.bindingPatternVariable; BLangVariable currentVar = currentPattern.bindingPatternVariable; if (checkStructuredPatternSimilarity(precedingVar, currentVar, errorTypeInMatchExpr) && checkTypeGuardSimilarity(precedingPattern.typeGuardExpr, currentPattern.typeGuardExpr)) { dlog.error(currentVar.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); clauses.remove(j--); } } } return finalPattern.isLastPattern; } private boolean isWildcardMatchPattern(BLangMatchStructuredBindingPatternClause finalPattern) { return ((BLangSimpleVariable) finalPattern.bindingPatternVariable).name.value.equals(Names.IGNORE.value); } /** * This method will check if two patterns are similar to each other. * Having similar patterns in the match block will result in unreachable pattern. * * @param precedingPattern pattern taken to compare similarity. * @param pattern the pattern that the precedingPattern is checked for similarity. * @return true if both patterns are similar. 
*/ private boolean checkLiteralSimilarity(BLangExpression precedingPattern, BLangExpression pattern) { if (precedingPattern.getKind() == NodeKind.BINARY_EXPR) { BLangBinaryExpr precedingBinaryExpr = (BLangBinaryExpr) precedingPattern; BLangExpression precedingLhsExpr = precedingBinaryExpr.lhsExpr; BLangExpression precedingRhsExpr = precedingBinaryExpr.rhsExpr; return checkLiteralSimilarity(precedingLhsExpr, pattern) || checkLiteralSimilarity(precedingRhsExpr, pattern); } if (pattern.getKind() == NodeKind.BINARY_EXPR) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) pattern; BLangExpression lhsExpr = binaryExpr.lhsExpr; BLangExpression rhsExpr = binaryExpr.rhsExpr; return checkLiteralSimilarity(precedingPattern, lhsExpr) || checkLiteralSimilarity(precedingPattern, rhsExpr); } switch (precedingPattern.type.tag) { case TypeTags.MAP: if (pattern.type.tag == TypeTags.MAP) { BLangRecordLiteral precedingRecordLiteral = (BLangRecordLiteral) precedingPattern; Map<String, BLangExpression> recordLiteral = ((BLangRecordLiteral) pattern).fields .stream() .map(field -> (BLangRecordKeyValueField) field) .collect(Collectors.toMap( keyValuePair -> ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value, BLangRecordKeyValueField::getValue )); for (int i = 0; i < precedingRecordLiteral.fields.size(); i++) { BLangRecordKeyValueField bLangRecordKeyValue = (BLangRecordKeyValueField) precedingRecordLiteral.fields.get(i); String key = ((BLangSimpleVarRef) bLangRecordKeyValue.key.expr).variableName.value; if (!recordLiteral.containsKey(key)) { return false; } if (!checkLiteralSimilarity(bLangRecordKeyValue.valueExpr, recordLiteral.get(key))) { return false; } } return true; } return false; case TypeTags.TUPLE: if (pattern.type.tag == TypeTags.TUPLE) { BLangListConstructorExpr precedingTupleLiteral = (BLangListConstructorExpr) precedingPattern; BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) pattern; if (precedingTupleLiteral.exprs.size() != 
tupleLiteral.exprs.size()) { return false; } return IntStream.range(0, precedingTupleLiteral.exprs.size()) .allMatch(i -> checkLiteralSimilarity(precedingTupleLiteral.exprs.get(i), tupleLiteral.exprs.get(i))); } return false; case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: if (precedingPattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BConstantSymbol precedingPatternSym = (BConstantSymbol) ((BLangSimpleVarRef) precedingPattern).symbol; if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { if (!((BLangSimpleVarRef) pattern).variableName.value.equals(Names.IGNORE.value)) { BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol; return precedingPatternSym.value.equals(patternSym.value); } return false; } BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ? (BLangLiteral) ((BLangGroupExpr) pattern).expression : (BLangLiteral) pattern; return (precedingPatternSym.value.equals(literal.value)); } if (types.isValueType(pattern.type)) { BLangLiteral precedingLiteral = precedingPattern.getKind() == NodeKind.GROUP_EXPR ? (BLangLiteral) ((BLangGroupExpr) precedingPattern).expression : (BLangLiteral) precedingPattern; if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { if (pattern.type.tag != TypeTags.NONE) { BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol; return patternSym.value.equals(precedingLiteral.value); } return false; } BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ? (BLangLiteral) ((BLangGroupExpr) pattern).expression : (BLangLiteral) pattern; return (precedingLiteral.value.equals(literal.value)); } return false; case TypeTags.ANY: if (pattern.type.tag == TypeTags.ERROR) { return false; } return true; default: return false; } } /** * This method will determine if the type guard of the preceding pattern will result in the current pattern * being unreachable. 
* * @param precedingGuard type guard of the preceding structured pattern * @param currentGuard type guard of the cuurent structured pattern * @return true if the current pattern is unreachable due to the type guard of the preceding pattern */ private boolean checkTypeGuardSimilarity(BLangExpression precedingGuard, BLangExpression currentGuard) { if (precedingGuard != null && currentGuard != null) { if (precedingGuard.getKind() == NodeKind.TYPE_TEST_EXPR && currentGuard.getKind() == NodeKind.TYPE_TEST_EXPR && ((BLangTypeTestExpr) precedingGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangTypeTestExpr) currentGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangTypeTestExpr precedingTypeTest = (BLangTypeTestExpr) precedingGuard; BLangTypeTestExpr currentTypeTest = (BLangTypeTestExpr) currentGuard; return ((BLangSimpleVarRef) precedingTypeTest.expr).variableName.toString().equals( ((BLangSimpleVarRef) currentTypeTest.expr).variableName.toString()) && precedingTypeTest.typeNode.type.tag == currentTypeTest.typeNode.type.tag; } return false; } return currentGuard != null || precedingGuard == null; } /** * This method will determine if the current structured pattern will be unreachable due to a preceding pattern. 
* * @param precedingVar the structured pattern that appears on top * @param var the structured pattern that appears after the precedingVar * @param errorTypeInMatchExpr * @return true if the the current pattern is unreachable due to the preceding pattern */ private boolean checkStructuredPatternSimilarity(BLangVariable precedingVar, BLangVariable var, boolean errorTypeInMatchExpr) { if (precedingVar.type.tag == TypeTags.SEMANTIC_ERROR || var.type.tag == TypeTags.SEMANTIC_ERROR) { return false; } if (precedingVar.getKind() == NodeKind.RECORD_VARIABLE && var.getKind() == NodeKind.RECORD_VARIABLE) { BLangRecordVariable precedingRecVar = (BLangRecordVariable) precedingVar; BLangRecordVariable recVar = (BLangRecordVariable) var; Map<String, BLangVariable> recVarAsMap = recVar.variableList.stream() .collect(Collectors.toMap( keyValue -> keyValue.key.value, keyValue -> keyValue.valueBindingPattern )); if (precedingRecVar.variableList.size() > recVar.variableList.size()) { return false; } for (int i = 0; i < precedingRecVar.variableList.size(); i++) { BLangRecordVariableKeyValue precedingKeyValue = precedingRecVar.variableList.get(i); if (!recVarAsMap.containsKey(precedingKeyValue.key.value)) { return false; } if (!checkStructuredPatternSimilarity( precedingKeyValue.valueBindingPattern, recVarAsMap.get(precedingKeyValue.key.value), errorTypeInMatchExpr)) { return false; } } if (precedingRecVar.hasRestParam() && recVar.hasRestParam()) { return true; } return precedingRecVar.hasRestParam() || !recVar.hasRestParam(); } if (precedingVar.getKind() == NodeKind.TUPLE_VARIABLE && var.getKind() == NodeKind.TUPLE_VARIABLE) { List<BLangVariable> precedingMemberVars = ((BLangTupleVariable) precedingVar).memberVariables; BLangVariable precedingRestVar = ((BLangTupleVariable) precedingVar).restVariable; List<BLangVariable> memberVars = ((BLangTupleVariable) var).memberVariables; BLangVariable memberRestVar = ((BLangTupleVariable) var).restVariable; if (precedingRestVar != null && 
memberRestVar != null) { return true; } if (precedingRestVar == null && memberRestVar == null && precedingMemberVars.size() != memberVars.size()) { return false; } if (precedingRestVar != null && precedingMemberVars.size() > memberVars.size()) { return false; } if (memberRestVar != null) { return false; } for (int i = 0; i < memberVars.size(); i++) { if (!checkStructuredPatternSimilarity(precedingMemberVars.get(i), memberVars.get(i), errorTypeInMatchExpr)) { return false; } } return true; } if (precedingVar.getKind() == NodeKind.ERROR_VARIABLE && var.getKind() == NodeKind.ERROR_VARIABLE) { BLangErrorVariable precedingErrVar = (BLangErrorVariable) precedingVar; BLangErrorVariable errVar = (BLangErrorVariable) var; if (precedingErrVar.restDetail != null && isDirectErrorBindingPattern(precedingErrVar)) { return true; } if (errVar.restDetail != null) { return false; } if (precedingErrVar.detail != null && errVar.detail != null) { Map<String, BLangVariable> preDetails = precedingErrVar.detail.stream() .collect(Collectors.toMap(entry -> entry.key.value, entry -> entry.valueBindingPattern)); for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : errVar.detail) { BLangVariable correspondingCurDetail = preDetails.get(detailEntry.key.value); if (correspondingCurDetail == null) { return false; } boolean similar = checkStructuredPatternSimilarity(detailEntry.valueBindingPattern, correspondingCurDetail, errorTypeInMatchExpr); if (!similar) { return false; } } } return true; } if (precedingVar.getKind() == NodeKind.VARIABLE && ((BLangSimpleVariable) precedingVar).name.value.equals(Names.IGNORE.value) && var.getKind() == NodeKind.ERROR_VARIABLE) { return false; } return precedingVar.getKind() == NodeKind.VARIABLE; } private boolean isDirectErrorBindingPattern(BLangErrorVariable precedingErrVar) { return precedingErrVar.typeNode == null; } /** * This method will check if the static match pattern is valid based on the matching type. 
* * @param matchType type of the expression being matched. * @param literal the static match pattern. * @return true if the pattern is valid, else false. */ private boolean isValidStaticMatchPattern(BType matchType, BLangExpression literal) { if (literal.type.tag == TypeTags.NONE) { return true; } if (types.isSameType(literal.type, matchType)) { return true; } if (TypeTags.ANY == literal.type.tag) { return true; } switch (matchType.tag) { case TypeTags.ANY: case TypeTags.ANYDATA: case TypeTags.JSON: return true; case TypeTags.UNION: BUnionType unionMatchType = (BUnionType) matchType; return unionMatchType.getMemberTypes() .stream() .anyMatch(memberMatchType -> isValidStaticMatchPattern(memberMatchType, literal)); case TypeTags.TUPLE: if (literal.type.tag == TypeTags.TUPLE) { BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) literal; BTupleType literalTupleType = (BTupleType) literal.type; BTupleType matchTupleType = (BTupleType) matchType; if (literalTupleType.tupleTypes.size() != matchTupleType.tupleTypes.size()) { return false; } return IntStream.range(0, literalTupleType.tupleTypes.size()) .allMatch(i -> isValidStaticMatchPattern(matchTupleType.tupleTypes.get(i), tupleLiteral.exprs.get(i))); } break; case TypeTags.MAP: if (literal.type.tag == TypeTags.MAP) { BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal; return IntStream.range(0, mapLiteral.fields.size()) .allMatch(i -> isValidStaticMatchPattern(((BMapType) matchType).constraint, ((BLangRecordKeyValueField) mapLiteral.fields.get(i)).valueExpr)); } break; case TypeTags.RECORD: if (literal.type.tag == TypeTags.MAP) { BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal; BRecordType recordMatchType = (BRecordType) matchType; Map<String, BType> recordFields = recordMatchType.fields .stream() .collect(Collectors.toMap( field -> field.getName().getValue(), BField::getType )); for (RecordLiteralNode.RecordField field : mapLiteral.fields) { BLangRecordKeyValueField literalKeyValue 
= (BLangRecordKeyValueField) field; String literalKeyName; NodeKind nodeKind = literalKeyValue.key.expr.getKind(); if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) { literalKeyName = ((BLangSimpleVarRef) literalKeyValue.key.expr).variableName.value; } else if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { literalKeyName = ((BLangLiteral) literalKeyValue.key.expr).value.toString(); } else { return false; } if (recordFields.containsKey(literalKeyName)) { if (!isValidStaticMatchPattern( recordFields.get(literalKeyName), literalKeyValue.valueExpr)) { return false; } } else if (recordMatchType.sealed || !isValidStaticMatchPattern(recordMatchType.restFieldType, literalKeyValue.valueExpr)) { return false; } } return true; } break; case TypeTags.BYTE: if (literal.type.tag == TypeTags.INT) { return true; } break; case TypeTags.FINITE: if (literal.getKind() == NodeKind.LITERAL || literal.getKind() == NodeKind.NUMERIC_LITERAL) { return types.isAssignableToFiniteType(matchType, (BLangLiteral) literal); } if (literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) literal).symbol.getKind() == SymbolKind.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) ((BLangSimpleVarRef) literal).symbol; return types.isAssignableToFiniteType(matchType, (BLangLiteral) ((BFiniteType) constSymbol.type).getValueSpace().iterator().next()); } break; } return false; } @Override public void visit(BLangForeach foreach) { this.loopWithintransactionCheckStack.push(true); boolean statementReturns = this.statementReturns; this.checkStatementExecutionValidity(foreach); this.loopCount++; analyzeNode(foreach.body, env); this.loopCount--; this.statementReturns = statementReturns; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(foreach.collection); } @Override public void visit(BLangWhile whileNode) { this.loopWithintransactionCheckStack.push(true); boolean statementReturns = this.statementReturns; 
this.checkStatementExecutionValidity(whileNode); this.loopCount++; analyzeNode(whileNode.body, env); this.loopCount--; this.statementReturns = statementReturns; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(whileNode.expr); } @Override public void visit(BLangLock lockNode) { this.checkStatementExecutionValidity(lockNode); boolean previousWithinLockBlock = this.withinLockBlock; this.withinLockBlock = true; lockNode.body.stmts.forEach(e -> analyzeNode(e, env)); this.withinLockBlock = previousWithinLockBlock; } @Override public void visit(BLangContinue continueNode) { this.checkStatementExecutionValidity(continueNode); if (this.loopCount == 0) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP); return; } if (checkNextBreakValidityInTransaction()) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); if (pkgEnv == null) { return; } analyzeNode(pkgEnv.node, env); } public void visit(BLangXMLNS xmlnsNode) { /* ignore */ } public void visit(BLangService serviceNode) { } public void visit(BLangResource resourceNode) { throw new RuntimeException("Deprecated lang feature"); } private void analyzeExportableTypeRef(BSymbol owner, BTypeSymbol symbol, boolean inFuncSignature, DiagnosticPos pos) { if (!inFuncSignature && Symbols.isFlagOn(owner.flags, Flags.ANONYMOUS)) { return; } if (Symbols.isPublic(owner)) { checkForExportableType(symbol, pos); } } private void checkForExportableType(BTypeSymbol symbol, DiagnosticPos pos) { if (symbol == null || symbol.type == null || Symbols.isFlagOn(symbol.flags, Flags.TYPE_PARAM)) { return; } switch (symbol.type.tag) { case TypeTags.ARRAY: checkForExportableType(((BArrayType) symbol.type).eType.tsymbol, pos); return; case 
TypeTags.TUPLE: BTupleType tupleType = (BTupleType) symbol.type; tupleType.tupleTypes.forEach(t -> checkForExportableType(t.tsymbol, pos)); if (tupleType.restType != null) { checkForExportableType(tupleType.restType.tsymbol, pos); } return; case TypeTags.MAP: checkForExportableType(((BMapType) symbol.type).constraint.tsymbol, pos); return; case TypeTags.RECORD: if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) { BRecordType recordType = (BRecordType) symbol.type; recordType.fields.forEach(f -> checkForExportableType(f.type.tsymbol, pos)); if (recordType.restFieldType != null) { checkForExportableType(recordType.restFieldType.tsymbol, pos); } return; } break; case TypeTags.TABLE: BTableType tableType = (BTableType) symbol.type; if (tableType.constraint != null) { checkForExportableType(tableType.constraint.tsymbol, pos); } return; case TypeTags.STREAM: BStreamType streamType = (BStreamType) symbol.type; if (streamType.constraint != null) { checkForExportableType(streamType.constraint.tsymbol, pos); } return; case TypeTags.INVOKABLE: BInvokableType invokableType = (BInvokableType) symbol.type; if (invokableType.paramTypes != null) { for (BType paramType : invokableType.paramTypes) { checkForExportableType(paramType.tsymbol, pos); } } if (invokableType.restType != null) { checkForExportableType(invokableType.restType.tsymbol, pos); } checkForExportableType(invokableType.retType.tsymbol, pos); return; } if (!Symbols.isPublic(symbol)) { dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name); } } public void visit(BLangLetExpression letExpression) { int ownerSymTag = this.env.scope.owner.tag; if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD) { dlog.error(letExpression.pos, DiagnosticCode.LET_EXPRESSION_NOT_YET_SUPPORTED_RECORD_FIELD); } else if ((ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) { dlog.error(letExpression.pos, DiagnosticCode.LET_EXPRESSION_NOT_YET_SUPPORTED_OBJECT_FIELD); } boolean returnStateBefore = this.statementReturns; 
this.statementReturns = false; for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { analyzeNode((BLangNode) letVariable.definitionNode, letExpression.env); } this.statementReturns = returnStateBefore; analyzeExpr(letExpression.expr, letExpression.env); } public void visit(BLangSimpleVariable varNode) { analyzeTypeNode(varNode.typeNode, this.env); BType varType = varNode.type; if (varType != null) { BTypeSymbol varTypeSymbol = varType.tsymbol; if (varTypeSymbol != null && Symbols.isFlagOn(varTypeSymbol.flags, Flags.DEPRECATED)) { dlog.warning(varNode.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, varNode.typeNode); } } analyzeExpr(varNode.expr); if (Objects.isNull(varNode.symbol)) { return; } if (!Symbols.isPublic(varNode.symbol)) { return; } int ownerSymTag = this.env.scope.owner.tag; if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD || (ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) { analyzeExportableTypeRef(this.env.scope.owner, varNode.type.tsymbol, false, varNode.pos); } else if ((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) { analyzeExportableTypeRef(varNode.symbol, varNode.type.tsymbol, false, varNode.pos); } varNode.annAttachments.forEach(annotationAttachment -> annotationAttachment.accept(this)); } private void checkWorkerPeerWorkerUsageInsideWorker(DiagnosticPos pos, BSymbol symbol, SymbolEnv env) { if ((symbol.flags & Flags.WORKER) == Flags.WORKER) { if (isCurrentPositionInWorker(env) && env.scope.lookup(symbol.name).symbol == null) { if (referingForkedWorkerOutOfFork(symbol, env)) { return; } dlog.error(pos, DiagnosticCode.INVALID_WORKER_REFERRENCE, symbol.name); } } } private boolean isCurrentPositionInWorker(SymbolEnv env) { if (env.enclInvokable != null && env.enclInvokable.flagSet.contains(Flag.WORKER)) { return true; } if (env.enclEnv != null && !(env.enclEnv.node.getKind() == NodeKind.PACKAGE || env.enclEnv.node.getKind() == NodeKind.OBJECT_TYPE)) { return isCurrentPositionInWorker(env.enclEnv); } return false; } 
/**
 * Returns true when {@code symbol} is a worker created inside a fork statement that is being
 * referred to from outside of any fork, i.e. the enclosing invokable is a function with no
 * anonymous fork name.
 */
private boolean referingForkedWorkerOutOfFork(BSymbol symbol, SymbolEnv env) {
    return (symbol.flags & Flags.FORKED) == Flags.FORKED
            && env.enclInvokable.getKind() == NodeKind.FUNCTION
            && ((BLangFunction) env.enclInvokable).anonForkName == null;
}

@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
    // Analyze the declared type (if present) and the initializer expression.
    if (bLangTupleVariable.typeNode != null) {
        analyzeNode(bLangTupleVariable.typeNode, this.env);
    }
    analyzeExpr(bLangTupleVariable.expr);
}

@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
    // Analyze the declared type (if present) and the initializer expression.
    if (bLangRecordVariable.typeNode != null) {
        analyzeNode(bLangRecordVariable.typeNode, this.env);
    }
    analyzeExpr(bLangRecordVariable.expr);
}

@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
    // Analyze the declared type (if present) and the initializer expression.
    if (bLangErrorVariable.typeNode != null) {
        analyzeNode(bLangErrorVariable.typeNode, this.env);
    }
    analyzeExpr(bLangErrorVariable.expr);
}

/**
 * Wraps {@code type} into a union with nil so the result is nullable. A type that is already
 * nullable is returned unchanged; a union input has its member types flattened into the new
 * union before nil is added.
 */
private BType getNilableType(BType type) {
    if (type.isNullable()) {
        return type;
    }
    BUnionType unionType = BUnionType.create(null);
    if (type.tag == TypeTags.UNION) {
        // Flatten existing union members rather than nesting unions.
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>(((BUnionType) type).getMemberTypes());
        unionType.addAll(memTypes);
    }
    unionType.add(type);
    unionType.add(symTable.nilType);
    return unionType;
}

public void visit(BLangIdentifier identifierNode) {
    /* ignore */
}

public void visit(BLangAnnotation annotationNode) {
    // Analyze the attachments placed on the annotation declaration itself.
    annotationNode.annAttachments.forEach(annotationAttachment -> annotationAttachment.accept(this));
}

public void visit(BLangAnnotationAttachment annAttachmentNode) {
    // Warn when the attached annotation is marked deprecated.
    BAnnotationSymbol annotationSymbol = annAttachmentNode.annotationSymbol;
    if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
        dlog.warning(annAttachmentNode.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, annotationSymbol);
    }
}

public void visit(BLangSimpleVariableDef varDefNode) {
    this.checkStatementExecutionValidity(varDefNode);
    analyzeNode(varDefNode.var, env);
}

public void visit(BLangCompoundAssignment compoundAssignment) {
    this.checkStatementExecutionValidity(compoundAssignment);
    analyzeExpr(compoundAssignment.varRef);
    analyzeExpr(compoundAssignment.expr);
}

public void visit(BLangAssignment assignNode) {
    this.checkStatementExecutionValidity(assignNode);
    analyzeExpr(assignNode.varRef);
    analyzeExpr(assignNode.expr);
}

public void visit(BLangRecordDestructure stmt) {
    // A destructuring assignment must not bind the same variable more than once.
    this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
    this.checkStatementExecutionValidity(stmt);
    analyzeExpr(stmt.varRef);
    analyzeExpr(stmt.expr);
}

public void visit(BLangErrorDestructure stmt) {
    // A destructuring assignment must not bind the same variable more than once.
    this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
    this.checkStatementExecutionValidity(stmt);
    analyzeExpr(stmt.varRef);
    analyzeExpr(stmt.expr);
}

@Override
public void visit(BLangTupleDestructure stmt) {
    // A destructuring assignment must not bind the same variable more than once.
    this.checkDuplicateVarRefs(getVarRefs(stmt.varRef));
    this.checkStatementExecutionValidity(stmt);
    analyzeExpr(stmt.varRef);
    analyzeExpr(stmt.expr);
}

/** Entry point: checks {@code varRefs} for duplicate bindings using a fresh symbol set. */
private void checkDuplicateVarRefs(List<BLangExpression> varRefs) {
    checkDuplicateVarRefs(varRefs, new HashSet<>());
}

/**
 * Recursively walks the variable references of a binding pattern and reports
 * DUPLICATE_VARIABLE_IN_BINDING_PATTERN whenever the same symbol is bound twice.
 * The ignore variable ('_') is exempt; nested tuple/record/error refs are expanded
 * into the same accumulating {@code symbols} set.
 */
private void checkDuplicateVarRefs(List<BLangExpression> varRefs, Set<BSymbol> symbols) {
    for (BLangExpression varRef : varRefs) {
        // Only simple/record/error/tuple variable refs participate in binding.
        if (varRef == null || (varRef.getKind() != NodeKind.SIMPLE_VARIABLE_REF
                && varRef.getKind() != NodeKind.RECORD_VARIABLE_REF
                && varRef.getKind() != NodeKind.ERROR_VARIABLE_REF
                && varRef.getKind() != NodeKind.TUPLE_VARIABLE_REF)) {
            continue;
        }
        // '_' deliberately discards the value, so duplicates of it are fine.
        if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && names.fromIdNode(((BLangSimpleVarRef) varRef).variableName) == Names.IGNORE) {
            continue;
        }
        if (varRef.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
            checkDuplicateVarRefs(getVarRefs((BLangTupleVarRef) varRef), symbols);
        }
        if (varRef.getKind() == NodeKind.RECORD_VARIABLE_REF) {
            checkDuplicateVarRefs(getVarRefs((BLangRecordVarRef) varRef), symbols);
        }
        if (varRef.getKind() == NodeKind.ERROR_VARIABLE_REF) {
            checkDuplicateVarRefs(getVarRefs((BLangErrorVarRef) varRef), symbols);
        }
        BLangVariableReference varRefExpr = (BLangVariableReference) varRef;
        // Set.add returning false means this symbol was already bound elsewhere.
        if (varRefExpr.symbol != null && !symbols.add(varRefExpr.symbol)) {
            this.dlog.error(varRef.pos, DiagnosticCode.DUPLICATE_VARIABLE_IN_BINDING_PATTERN,
                    varRefExpr.symbol);
        }
    }
}

/** Collects the field references plus the rest parameter of a record variable ref. */
private List<BLangExpression> getVarRefs(BLangRecordVarRef varRef) {
    List<BLangExpression> varRefs = varRef.recordRefFields.stream()
            .map(e -> e.variableReference).collect(Collectors.toList());
    varRefs.add((BLangExpression) varRef.restParam);
    return varRefs;
}

/** Collects the reason, detail entries and rest variable of an error variable ref. */
private List<BLangExpression> getVarRefs(BLangErrorVarRef varRef) {
    List<BLangExpression> varRefs = new ArrayList<>();
    varRefs.add(varRef.reason);
    varRefs.addAll(varRef.detail.stream().map(e -> e.expr).collect(Collectors.toList()));
    varRefs.add(varRef.restVar);
    return varRefs;
}

/** Collects the member expressions plus the rest parameter of a tuple variable ref. */
private List<BLangExpression> getVarRefs(BLangTupleVarRef varRef) {
    List<BLangExpression> varRefs = new ArrayList<>(varRef.expressions);
    varRefs.add((BLangExpression) varRef.restParam);
    return varRefs;
}

public void visit(BLangBreak breakNode) {
    this.checkStatementExecutionValidity(breakNode);
    // 'break' is only legal inside a loop ...
    if (this.loopCount == 0) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
        return;
    }
    // ... and must not be used to jump out of a transaction block.
    if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    // Statements after an unconditional break are unreachable.
    this.lastStatement = true;
}

public void visit(BLangThrow throwNode) {
    /* ignore */
}

public void visit(BLangPanic panicNode) {
    this.checkStatementExecutionValidity(panicNode);
    // A panic terminates the enclosing block, so it counts as a return for reachability.
    this.statementReturns = true;
    analyzeExpr(panicNode.expr);
}

public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    this.checkStatementExecutionValidity(xmlnsStmtNode);
}

public void visit(BLangExpressionStmt exprStmtNode) {
    this.checkStatementExecutionValidity(exprStmtNode);
    analyzeExpr(exprStmtNode.expr);
    validateExprStatementExpression(exprStmtNode);
}

/**
 * Validates an expression used as a statement. Sync sends are always allowed; otherwise the
 * expression is unwrapped through match/check/checkpanic layers, and the remaining expression
 * passes if it is an invocation or wait. A remaining expression whose static type is nil is
 * reported as INVALID_EXPR_STATEMENT.
 */
private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) {
    BLangExpression expr = exprStmtNode.expr;
    if (expr.getKind() == NodeKind.WORKER_SYNC_SEND) {
        return;
    }
    // Peel off nested match/check/checkpanic wrappers to reach the underlying expression.
    while (expr.getKind() == NodeKind.MATCH_EXPRESSION
            || expr.getKind() == NodeKind.CHECK_EXPR
            || expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
        if (expr.getKind() == NodeKind.MATCH_EXPRESSION) {
            expr = ((BLangMatchExpression) expr).expr;
        } else if (expr.getKind() == NodeKind.CHECK_EXPR) {
            expr = ((BLangCheckedExpr) expr).expr;
        } else if (expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
            expr = ((BLangCheckPanickedExpr) expr).expr;
        }
    }
    if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.WAIT_EXPR) {
        return;
    }
    if (expr.type == symTable.nilType) {
        dlog.error(exprStmtNode.pos, DiagnosticCode.INVALID_EXPR_STATEMENT);
    }
}

public void visit(BLangTryCatchFinally tryNode) {
    /* ignore */
}

public void visit(BLangCatch catchNode) {
    /* ignore */
}

/** True when the current statement position is directly in the enclosing invokable's body. */
private boolean isTopLevel() {
    SymbolEnv env = this.env;
    return env.enclInvokable.body == env.node;
}

/** True when the enclosing invokable carries the WORKER flag (i.e. is a worker lambda). */
private boolean isInWorker() {
    return env.enclInvokable.flagSet.contains(Flag.WORKER);
}

/**
 * Worker communication is allowed either at the top level of the enclosing invokable body, or
 * — when addressing the default worker — from inside a worker body.
 */
private boolean isCommunicationAllowedLocation(String workerIdentifier) {
    return (isDefaultWorkerCommunication(workerIdentifier) && isInWorker()) || isTopLevel();
}

private boolean isDefaultWorkerCommunication(String workerIdentifier) {
    return workerIdentifier.equals(DEFAULT_WORKER_NAME);
}

/**
 * Checks whether a worker with the given name exists: the looked-up symbol's type must be a
 * worker-derived future. Talking to the default worker from inside a worker always counts as
 * existing; a semantic-error type never does.
 */
private boolean workerExists(BType type, String workerName) {
    if (isDefaultWorkerCommunication(workerName) && isInWorker()) {
        return true;
    }
    if (type == symTable.semanticError) {
        return false;
    }
    return type.tag == TypeTags.FUTURE && ((BFutureType) type).workerDerivative;
}

/**
 * Validates an async worker send: the receiver must be a peer worker that exists, the sent
 * value must be anydata, and the send must appear at an allowed location. The action is then
 * recorded in the current worker action system for later send/receive matching.
 */
public void visit(BLangWorkerSend workerSendNode) {
    BSymbol receiver = symResolver.lookupSymbolInMainSpace(env,
            names.fromIdNode(workerSendNode.workerIdentifier));
    if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
        receiver = symTable.notFoundSymbol;
    }
    verifyPeerCommunication(workerSendNode.pos, receiver, workerSendNode.workerIdentifier.value);
    this.checkStatementExecutionValidity(workerSendNode);
    if (workerSendNode.isChannel) {
        // Channel sends only need their value (and optional key) expressions analyzed.
        analyzeExpr(workerSendNode.expr);
        if (workerSendNode.keyExpr != null) {
            analyzeExpr(workerSendNode.keyExpr);
        }
        return;
    }
    WorkerActionSystem was = this.workerActionSystemStack.peek();
    BType type = workerSendNode.expr.type;
    if (type == symTable.semanticError) {
        // Error of a previous stage — record it so matching is skipped, but do not re-report.
        was.hasErrors = true;
    } else if (!type.isAnydata()) {
        this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, type);
    }
    String workerName = workerSendNode.workerIdentifier.getValue();
    boolean allowedLocation = isCommunicationAllowedLocation(workerName);
    if (!allowedLocation) {
        this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION);
        was.hasErrors = true;
    }
    if (!this.workerExists(workerSendNode.type, workerName)) {
        this.dlog.error(workerSendNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        was.hasErrors = true;
    }
    // The matching receive may also observe any error returned before this send.
    workerSendNode.type = createAccumulatedErrorTypeForMatchingRecive(workerSendNode.pos,
            workerSendNode.expr.type);
    was.addWorkerAction(workerSendNode);
    analyzeExpr(workerSendNode.expr);
    validateActionParentNode(workerSendNode.pos, workerSendNode.expr);
}

/**
 * Builds the type a matching receive has to handle: the sent expression type unioned with any
 * error types already recorded as possible returns of the worker. A send occurring after a
 * non-error return is reported as WORKER_SEND_AFTER_RETURN.
 */
private BType createAccumulatedErrorTypeForMatchingRecive(DiagnosticPos pos, BType exprType) {
    Set<BType> returnTypesUpToNow = this.returnTypes.peek();
    LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<BType>() {
        {
            // NOTE(review): this instance initializer builds a Comparator and discards it;
            // it has no effect on the set — looks like dead code. TODO confirm and remove
            // in a separate change.
            Comparator.comparing(BType::toString);
        }
    };
    for (BType returnType : returnTypesUpToNow) {
        if (returnType.tag == TypeTags.ERROR) {
            returnTypeAndSendType.add(returnType);
        } else {
            this.dlog.error(pos, DiagnosticCode.WORKER_SEND_AFTER_RETURN);
        }
    }
    returnTypeAndSendType.add(exprType);
    if (returnTypeAndSendType.size() > 1) {
        return BUnionType.create(null, returnTypeAndSendType);
    } else {
        return exprType;
    }
}

/**
 * Validates a synchronous send expression: peer, location and worker-existence checks mirror
 * the async send, and the action is recorded in the worker action system for matching.
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol receiver = symResolver.lookupSymbolInMainSpace(env,
            names.fromIdNode(syncSendExpr.workerIdentifier));
    if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
        receiver = symTable.notFoundSymbol;
    }
    verifyPeerCommunication(syncSendExpr.pos, receiver, syncSendExpr.workerIdentifier.value);
    validateActionParentNode(syncSendExpr.pos, syncSendExpr);
    String workerName = syncSendExpr.workerIdentifier.getValue();
    WorkerActionSystem was = this.workerActionSystemStack.peek();
    boolean allowedLocation = isCommunicationAllowedLocation(workerName);
    if (!allowedLocation) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION);
        was.hasErrors = true;
    }
    if (!this.workerExists(syncSendExpr.workerType, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        was.hasErrors = true;
    }
    syncSendExpr.type = createAccumulatedErrorTypeForMatchingRecive(syncSendExpr.pos,
            syncSendExpr.expr.type);
    was.addWorkerAction(syncSendExpr);
    analyzeExpr(syncSendExpr.expr);
}

/**
 * Validates a worker receive: the sender must be a peer worker that exists, and the receive
 * must be at an allowed location. Channel receives only need the key expression analyzed.
 * The action is recorded for send/receive matching.
 */
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    validateActionParentNode(workerReceiveNode.pos, workerReceiveNode);
    BSymbol sender = symResolver.lookupSymbolInMainSpace(env,
            names.fromIdNode(workerReceiveNode.workerIdentifier));
    if ((sender.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
        sender = symTable.notFoundSymbol;
    }
    verifyPeerCommunication(workerReceiveNode.pos, sender, workerReceiveNode.workerIdentifier.value);
    if (workerReceiveNode.isChannel) {
        if (workerReceiveNode.keyExpr != null) {
            analyzeExpr(workerReceiveNode.keyExpr);
        }
        return;
    }
    WorkerActionSystem was = this.workerActionSystemStack.peek();
    String workerName = workerReceiveNode.workerIdentifier.getValue();
    boolean allowedLocation = isCommunicationAllowedLocation(workerName);
    if (!allowedLocation) {
        this.dlog.error(workerReceiveNode.pos, DiagnosticCode.INVALID_WORKER_RECEIVE_POSITION);
        was.hasErrors = true;
    }
    if (!this.workerExists(workerReceiveNode.workerType, workerName)) {
        this.dlog.error(workerReceiveNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        was.hasErrors = true;
    }
    // A matching errored sync send surfaces here as the receive's possible error result.
    workerReceiveNode.matchingSendsError = createAccumulatedErrorTypeForMatchingSyncSend(workerReceiveNode);
    was.addWorkerAction(workerReceiveNode);
}
private void verifyPeerCommunication(DiagnosticPos pos, BSymbol otherWorker, String otherWorkerName) { if (env.enclEnv.node.getKind() != NodeKind.FUNCTION) { return; } BLangFunction funcNode = (BLangFunction) env.enclEnv.node; Set<Flag> flagSet = funcNode.flagSet; Name workerDerivedName = names.fromString("0" + otherWorker.name.value); if (flagSet.contains(Flag.WORKER)) { if (otherWorkerName.equals(DEFAULT_WORKER_NAME)) { if (flagSet.contains(Flag.FORKED)) { dlog.error(pos, DiagnosticCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS); } return; } Scope enclFunctionScope = env.enclEnv.enclEnv.scope; BInvokableSymbol wLambda = (BInvokableSymbol) enclFunctionScope.lookup(workerDerivedName).symbol; if (wLambda != null && funcNode.anonForkName != null && !funcNode.anonForkName.equals(wLambda.enclForkName)) { dlog.error(pos, DiagnosticCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS); } } else { BInvokableSymbol wLambda = (BInvokableSymbol) env.scope.lookup(workerDerivedName).symbol; if (wLambda != null && wLambda.enclForkName != null) { dlog.error(pos, DiagnosticCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS); } } } public BType createAccumulatedErrorTypeForMatchingSyncSend(BLangWorkerReceive workerReceiveNode) { Set<BType> returnTypesUpToNow = this.returnTypes.peek(); LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>(); for (BType returnType : returnTypesUpToNow) { if (returnType.tag == TypeTags.ERROR) { returnTypeAndSendType.add(returnType); } else { this.dlog.error(workerReceiveNode.pos, DiagnosticCode.WORKER_RECEIVE_AFTER_RETURN); } } returnTypeAndSendType.add(symTable.nilType); if (returnTypeAndSendType.size() > 1) { return BUnionType.create(null, returnTypeAndSendType); } else { return symTable.nilType; } } public void visit(BLangLiteral literalExpr) { if (literalExpr.type.tag == TypeTags.NIL && NULL_LITERAL.equals(literalExpr.originalValue) && !literalExpr.isJSONContext && !this.isJSONContext) { dlog.error(literalExpr.pos, 
DiagnosticCode.INVALID_USE_OF_NULL_LITERAL); } } public void visit(BLangListConstructorExpr listConstructorExpr) { analyzeExprs(listConstructorExpr.exprs); } public void visit(BLangRecordLiteral recordLiteral) { List<RecordLiteralNode.RecordField> fields = recordLiteral.fields; for (RecordLiteralNode.RecordField field : fields) { if (field.isKeyValueField()) { analyzeExpr(((BLangRecordKeyValueField) field).valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { analyzeExpr((BLangRecordLiteral.BLangRecordVarNameField) field); } else { analyzeExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr); } } Set<Object> names = new HashSet<>(); BType type = recordLiteral.type; boolean isOpenRecord = type != null && type.tag == TypeTags.RECORD && !((BRecordType) type).sealed; for (RecordLiteralNode.RecordField field : fields) { BLangExpression keyExpr; if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField = (BLangRecordLiteral.BLangRecordSpreadOperatorField) field; BLangExpression spreadOpExpr = spreadOpField.expr; analyzeExpr(spreadOpExpr); if (spreadOpExpr.type.tag != TypeTags.RECORD) { continue; } for (BField bField : ((BRecordType) spreadOpExpr.type).fields) { if (Symbols.isOptional(bField.symbol)) { continue; } String name = bField.name.value; if (names.contains(name)) { this.dlog.error(spreadOpExpr.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL_SPREAD_OP, recordLiteral.expectedType.getKind().typeName(), name, spreadOpField); } names.add(name); } } else { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKey key = ((BLangRecordKeyValueField) field).key; keyExpr = key.expr; if (key.computedKey) { analyzeExpr(keyExpr); continue; } } else { keyExpr = (BLangRecordLiteral.BLangRecordVarNameField) field; } if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { String name = ((BLangSimpleVarRef) keyExpr).variableName.value; if (names.contains(name)) { 
this.dlog.error(keyExpr.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, recordLiteral.expectedType.getKind().typeName(), name); } if (isOpenRecord && ((BRecordType) type).fields.stream() .noneMatch(recField -> name.equals(recField.name.value))) { dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_IDENTIFIER_KEY, name); } names.add(name); } else if (keyExpr.getKind() == NodeKind.LITERAL || keyExpr.getKind() == NodeKind.NUMERIC_LITERAL) { Object name = ((BLangLiteral) keyExpr).value; if (names.contains(name)) { this.dlog.error(keyExpr.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, recordLiteral.parent.type.getKind().typeName(), name); } names.add(name); } } } } public void visit(BLangTableLiteral tableLiteral) { /* ignore */ } public void visit(BLangSimpleVarRef varRefExpr) { switch (varRefExpr.parent.getKind()) { case WORKER_RECEIVE: case WORKER_SEND: case WORKER_SYNC_SEND: return; default: if (varRefExpr.type != null && varRefExpr.type.tag == TypeTags.FUTURE) { checkWorkerPeerWorkerUsageInsideWorker(varRefExpr.pos, varRefExpr.symbol, this.env); } } if (varRefExpr.symbol != null && Symbols.isFlagOn(varRefExpr.symbol.flags, Flags.DEPRECATED)) { dlog.warning(varRefExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, varRefExpr); } } public void visit(BLangRecordVarRef varRefExpr) { /* ignore */ } public void visit(BLangErrorVarRef varRefExpr) { /* ignore */ } public void visit(BLangTupleVarRef varRefExpr) { /* ignore */ } public void visit(BLangFieldBasedAccess fieldAccessExpr) { analyzeExpr(fieldAccessExpr.expr); BSymbol symbol = fieldAccessExpr.symbol; if (symbol != null && Symbols.isFlagOn(fieldAccessExpr.symbol.flags, Flags.DEPRECATED)) { dlog.warning(fieldAccessExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, fieldAccessExpr); } } public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeExpr(indexAccessExpr.indexExpr); analyzeExpr(indexAccessExpr.expr); } public void visit(BLangInvocation invocationExpr) { 
analyzeExpr(invocationExpr.expr); analyzeExprs(invocationExpr.requiredArgs); analyzeExprs(invocationExpr.restArgs); if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) { BSymbol funcSymbol = invocationExpr.symbol; if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) { dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, invocationExpr); } } if (invocationExpr.actionInvocation || invocationExpr.async) { if (invocationExpr.actionInvocation || !this.withinLockBlock) { validateActionInvocation(invocationExpr.pos, invocationExpr); return; } dlog.error(invocationExpr.pos, invocationExpr.functionPointerInvocation ? DiagnosticCode.USAGE_OF_WORKER_WITHIN_LOCK_IS_PROHIBITED : DiagnosticCode.USAGE_OF_START_WITHIN_LOCK_IS_PROHIBITED); } } private void validateActionInvocation(DiagnosticPos pos, BLangInvocation iExpr) { if (iExpr.expr != null) { final NodeKind clientNodeKind = iExpr.expr.getKind(); if (clientNodeKind != NodeKind.SIMPLE_VARIABLE_REF && clientNodeKind != NodeKind.FIELD_BASED_ACCESS_EXPR) { dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } else if (clientNodeKind == NodeKind.FIELD_BASED_ACCESS_EXPR) { final BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) iExpr.expr; if (fieldBasedAccess.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } else { final BLangSimpleVarRef selfName = (BLangSimpleVarRef) fieldBasedAccess.expr; if (!Names.SELF.equals(selfName.symbol.name)) { dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } } } } validateActionParentNode(pos, iExpr); } /** * Actions can only occur as part of a statement or nested inside other actions. 
*/ private void validateActionParentNode(DiagnosticPos pos, BLangNode node) { BLangNode parent = node.parent; if (parent.getKind() == NodeKind.BLOCK) { return; } while (parent != null) { final NodeKind kind = parent.getKind(); if (kind == NodeKind.ASSIGNMENT || kind == NodeKind.EXPRESSION_STATEMENT || kind == NodeKind.RETURN || kind == NodeKind.RECORD_DESTRUCTURE || kind == NodeKind.ERROR_DESTRUCTURE || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE || kind == NodeKind.RECORD_VARIABLE || kind == NodeKind.TUPLE_VARIABLE || kind == NodeKind.ERROR_VARIABLE || kind == NodeKind.MATCH || kind == NodeKind.FOREACH) { return; } else if (kind == NodeKind.CHECK_PANIC_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.WORKER_RECEIVE || kind == NodeKind.WORKER_FLUSH || kind == NodeKind.WORKER_SEND || kind == NodeKind.WAIT_EXPR || kind == NodeKind.GROUP_EXPR || kind == NodeKind.TRAP_EXPR) { parent = parent.parent; if (parent.getKind() == NodeKind.BLOCK || parent.getKind() == NodeKind.BLOCK_FUNCTION_BODY) { return; } continue; } else if (kind == NodeKind.ELVIS_EXPR && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) { parent = parent.parent; continue; } break; } dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } public void visit(BLangTypeInit cIExpr) { analyzeExprs(cIExpr.argsExpr); analyzeExpr(cIExpr.initInvocation); BType type = cIExpr.type; if (cIExpr.userDefinedType != null && Symbols.isFlagOn(type.tsymbol.flags, Flags.DEPRECATED)) { dlog.warning(cIExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, type); } } public void visit(BLangTernaryExpr ternaryExpr) { analyzeExpr(ternaryExpr.expr); boolean isJSONCtx = getIsJSONContext(ternaryExpr.type); this.isJSONContext = isJSONCtx; analyzeExpr(ternaryExpr.thenExpr); this.isJSONContext = isJSONCtx; analyzeExpr(ternaryExpr.elseExpr); } public void visit(BLangWaitExpr awaitExpr) { 
analyzeExpr(awaitExpr.getExpression()); validateActionParentNode(awaitExpr.pos, awaitExpr); } public void visit(BLangWaitForAllExpr waitForAllExpr) { waitForAllExpr.keyValuePairs.forEach(keyValue -> { BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr; analyzeExpr(expr); }); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { analyzeExpr(xmlElementAccess.expr); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { analyzeExpr(xmlNavigation.expr); if (xmlNavigation.childIndex != null) { if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS || xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { dlog.error(xmlNavigation.pos, DiagnosticCode.UNSUPPORTED_INDEX_IN_XML_NAVIGATION); } analyzeExpr(xmlNavigation.childIndex); } validateMethodInvocationsInXMLNavigationExpression(xmlNavigation); } private void validateMethodInvocationsInXMLNavigationExpression(BLangXMLNavigationAccess expression) { if (!expression.methodInvocationAnalyzed && expression.parent.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expression.parent; if (invocation.argExprs.contains(expression) && ((invocation.symbol.flags & Flags.LANG_LIB) != Flags.LANG_LIB)) { return; } dlog.error(invocation.pos, DiagnosticCode.UNSUPPORTED_METHOD_INVOCATION_XML_NAV); } expression.methodInvocationAnalyzed = true; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { BLangIdentifier flushWrkIdentifier = workerFlushExpr.workerIdentifier; Stack<WorkerActionSystem> workerActionSystems = this.workerActionSystemStack; WorkerActionSystem currentWrkerAction = workerActionSystems.peek(); List<BLangWorkerSend> sendStmts = getAsyncSendStmtsOfWorker(currentWrkerAction); if (flushWrkIdentifier != null) { List<BLangWorkerSend> sendsToGivenWrkr = sendStmts.stream() .filter(bLangNode -> bLangNode.workerIdentifier.equals (flushWrkIdentifier)) 
.collect(Collectors.toList()); if (sendsToGivenWrkr.size() == 0) { this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH_FOR_WORKER, flushWrkIdentifier, currentWrkerAction.currentWorkerId()); return; } else { sendStmts = sendsToGivenWrkr; } } else { if (sendStmts.size() == 0) { this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH, currentWrkerAction.currentWorkerId()); return; } } workerFlushExpr.cachedWorkerSendStmts = sendStmts; validateActionParentNode(workerFlushExpr.pos, workerFlushExpr); } private List<BLangWorkerSend> getAsyncSendStmtsOfWorker(WorkerActionSystem currentWorkerAction) { List<BLangNode> actions = currentWorkerAction.workerActionStateMachines.peek().actions; return actions.stream() .filter(CodeAnalyzer::isWorkerSend) .map(bLangNode -> (BLangWorkerSend) bLangNode) .collect(Collectors.toList()); } @Override public void visit(BLangTrapExpr trapExpr) { analyzeExpr(trapExpr.expr); } public void visit(BLangBinaryExpr binaryExpr) { if (validateBinaryExpr(binaryExpr)) { boolean isJSONCtx = getIsJSONContext(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type); this.isJSONContext = isJSONCtx; analyzeExpr(binaryExpr.lhsExpr); this.isJSONContext = isJSONCtx; analyzeExpr(binaryExpr.rhsExpr); } } private boolean validateBinaryExpr(BLangBinaryExpr binaryExpr) { if (binaryExpr.lhsExpr.type.tag != TypeTags.FUTURE && binaryExpr.rhsExpr.type.tag != TypeTags.FUTURE) { return true; } BLangNode parentNode = binaryExpr.parent; if (binaryExpr.lhsExpr.type.tag == TypeTags.FUTURE || binaryExpr.rhsExpr.type.tag == TypeTags.FUTURE) { if (parentNode == null) { return false; } if (parentNode.getKind() == NodeKind.WAIT_EXPR) { return true; } } if (parentNode.getKind() != NodeKind.BINARY_EXPR && binaryExpr.opKind == OperatorKind.BITWISE_OR) { dlog.error(binaryExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED, OperatorKind.BITWISE_OR, symTable.futureType); return false; } if (parentNode.getKind() == NodeKind.BINARY_EXPR) { return 
validateBinaryExpr((BLangBinaryExpr) parentNode); } return true; } // tail of a visitor method that begins before this chunk

    // Walks both operands of an elvis expression (lhs ?: rhs).
    public void visit(BLangElvisExpr elvisExpr) {
        analyzeExpr(elvisExpr.lhsExpr);
        analyzeExpr(elvisExpr.rhsExpr);
    }

    @Override
    public void visit(BLangGroupExpr groupExpr) {
        analyzeExpr(groupExpr.expression);
    }

    public void visit(BLangUnaryExpr unaryExpr) {
        analyzeExpr(unaryExpr.expr);
    }

    public void visit(BLangTypedescExpr accessExpr) {
        /* ignore */
    }

    public void visit(BLangXMLQName xmlQName) {
        /* ignore */
    }

    public void visit(BLangXMLAttribute xmlAttribute) {
        analyzeExpr(xmlAttribute.name);
        analyzeExpr(xmlAttribute.value);
    }

    public void visit(BLangXMLElementLiteral xmlElementLiteral) {
        analyzeExpr(xmlElementLiteral.startTagName);
        analyzeExpr(xmlElementLiteral.endTagName);
        analyzeExprs(xmlElementLiteral.attributes);
        analyzeExprs(xmlElementLiteral.children);
    }

    public void visit(BLangXMLTextLiteral xmlTextLiteral) {
        analyzeExprs(xmlTextLiteral.textFragments);
    }

    public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
        analyzeExprs(xmlCommentLiteral.textFragments);
    }

    public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
        analyzeExprs(xmlProcInsLiteral.dataFragments);
        analyzeExpr(xmlProcInsLiteral.target);
    }

    public void visit(BLangXMLQuotedString xmlQuotedString) {
        analyzeExprs(xmlQuotedString.textFragments);
    }

    public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
        analyzeExprs(stringTemplateLiteral.exprs);
    }

    // Lambdas that desugar worker declarations (variable name prefixed with
    // WORKER_LAMBDA_VAR_PREFIX) open a new worker state machine before the
    // function body is analyzed and close it afterwards.
    public void visit(BLangLambdaFunction bLangLambdaFunction) {
        boolean isWorker = false;
        if (bLangLambdaFunction.parent.getKind() == NodeKind.VARIABLE) {
            String workerVarName = ((BLangSimpleVariable) bLangLambdaFunction.parent).name.value;
            if (workerVarName.startsWith(WORKER_LAMBDA_VAR_PREFIX)) {
                String workerName = workerVarName.substring(1);
                isWorker = true;
                this.workerActionSystemStack.peek().startWorkerActionStateMachine(workerName,
                        bLangLambdaFunction.function.pos, bLangLambdaFunction.function);
            }
        }
        // Preserve the enclosing scope's "statement returns" flag across the lambda body.
        boolean statementReturn = this.statementReturns;
        this.visitFunction(bLangLambdaFunction.function);
        this.statementReturns = statementReturn;
        if (isWorker) {
            this.workerActionSystemStack.peek().endWorkerActionStateMachine();
        }
    }

    public void visit(BLangArrowFunction bLangArrowFunction) {
        analyzeExpr(bLangArrowFunction.body.expr);
    }

    public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
        analyzeExpr(xmlAttributeAccessExpr.expr);
        analyzeExpr(xmlAttributeAccessExpr.indexExpr);
    }

    public void visit(BLangIntRangeExpression intRangeExpression) {
        analyzeExpr(intRangeExpression.startExpr);
        analyzeExpr(intRangeExpression.endExpr);
    }

    /* Type Nodes */

    @Override
    public void visit(BLangRecordTypeNode recordTypeNode) {
        SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);
        if (recordTypeNode.isFieldAnalyseRequired) {
            recordTypeNode.fields.forEach(field -> analyzeNode(field, recordEnv));
        }
    }

    @Override
    public void visit(BLangObjectTypeNode objectTypeNode) {
        SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);
        if (objectTypeNode.isFieldAnalyseRequired) {
            objectTypeNode.fields.forEach(field -> analyzeNode(field, objectEnv));
        }
        // Analyze member functions (plus the init function, when present) in source-line order.
        Stream.concat(objectTypeNode.functions.stream(),
                Optional.ofNullable(objectTypeNode.initFunction).map(Stream::of).orElseGet(Stream::empty))
                .sorted(Comparator.comparingInt(fn -> fn.pos.sLine))
                .forEachOrdered(fn -> this.analyzeNode(fn, objectEnv));
    }

    @Override
    public void visit(BLangValueType valueType) {
        /* ignore */
    }

    @Override
    public void visit(BLangArrayType arrayType) {
        analyzeTypeNode(arrayType.elemtype, env);
    }

    public void visit(BLangBuiltInRefTypeNode builtInRefType) {
        /* ignore */
    }

    public void visit(BLangConstrainedType constrainedType) {
        analyzeTypeNode(constrainedType.constraint, env);
    }

    public void visit(BLangStreamType streamType) {
        analyzeTypeNode(streamType.constraint, env);
        analyzeTypeNode(streamType.error, env);
    }

    public void visit(BLangErrorType errorType) {
        analyzeTypeNode(errorType.reasonType, env);
        analyzeTypeNode(errorType.detailType, env);
    }

    public void visit(BLangUserDefinedType userDefinedType) {
        /* Ignore */
    }

    public void visit(BLangTupleTypeNode tupleTypeNode) {
        tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env));
        analyzeTypeNode(tupleTypeNode.restParamType, env);
    }

    public void visit(BLangUnionTypeNode unionTypeNode) {
        unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env));
    }

    public void visit(BLangFunctionTypeNode functionTypeNode) {
        functionTypeNode.params.forEach(node -> analyzeNode(node, env));
        analyzeTypeNode(functionTypeNode.returnTypeNode, env);
    }

    @Override
    public void visit(BLangFiniteTypeNode finiteTypeNode) {
        /* Ignore */
    }

    @Override
    public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
        analyzeExpr(bLangVarArgsExpression.expr);
    }

    @Override
    public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
        analyzeExpr(bLangNamedArgsExpression.expr);
    }

    @Override
    public void visit(BLangMatchExpression bLangMatchExpression) {
    }

    // A check expression requires the enclosing invokable to be able to return the
    // checked-out error, and must not be used to exit a transaction block.
    @Override
    public void visit(BLangCheckedExpr checkedExpr) {
        analyzeExpr(checkedExpr.expr);
        if (this.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
            // Module-level check expressions are not validated against a return type.
            return;
        }
        BType exprType = env.enclInvokable.getReturnTypeNode().type;
        if (!types.isAssignable(getErrorTypes(checkedExpr.expr.type), exprType)) {
            dlog.error(checkedExpr.pos, DiagnosticCode.CHECKED_EXPR_NO_MATCHING_ERROR_RETURN_IN_ENCL_INVOKABLE);
        }
        if (checkReturnValidityInTransaction()) {
            this.dlog.error(checkedExpr.pos, DiagnosticCode.CHECK_EXPRESSION_INVALID_USAGE_WITHIN_TRANSACTION_BLOCK);
            return;
        }
        returnTypes.peek().add(exprType);
    }

    @Override
    public void visit(BLangCheckPanickedExpr checkPanicExpr) {
        analyzeExpr(checkPanicExpr.expr);
    }

    @Override
    public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    }

    // A stream may only be iterated by the first from-clause of a query expression.
    @Override
    public void visit(BLangQueryExpr queryExpr) {
        int fromCount = 0;
        for (FromClauseNode fromClauseNode : queryExpr.fromClauseList) {
            fromCount++;
BLangExpression collection = (BLangExpression) fromClauseNode.getCollection();
            if (fromCount > 1) {
                // Streams are single-pass, so only the first from-clause may use one.
                if (TypeTags.STREAM == collection.type.tag) {
                    this.dlog.error(collection.pos, DiagnosticCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM);
                }
            }
            analyzeNode((BLangFromClause) fromClauseNode, env);
        }
        for (WhereClauseNode whereClauseNode : queryExpr.whereClauseList) {
            analyzeNode((BLangWhereClause) whereClauseNode, env);
        }
        analyzeNode(queryExpr.selectClause, env);
    }

    // Same stream restriction as query expressions, plus a check that the action
    // appears in a valid parent position.
    @Override
    public void visit(BLangQueryAction queryAction) {
        int fromCount = 0;
        for (FromClauseNode fromClauseNode : queryAction.fromClauseList) {
            fromCount++;
            BLangExpression collection = (BLangExpression) fromClauseNode.getCollection();
            if (fromCount > 1) {
                if (TypeTags.STREAM == collection.type.tag) {
                    this.dlog.error(collection.pos, DiagnosticCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM);
                }
            }
            analyzeNode((BLangFromClause) fromClauseNode, env);
        }
        for (WhereClauseNode whereClauseNode : queryAction.whereClauseList) {
            analyzeNode((BLangWhereClause) whereClauseNode, env);
        }
        analyzeNode(queryAction.doClause, env);
        validateActionParentNode(queryAction.pos, queryAction);
    }

    @Override
    public void visit(BLangFromClause fromClause) {
        analyzeExpr(fromClause.collection);
    }

    @Override
    public void visit(BLangWhereClause whereClause) {
        analyzeExpr(whereClause.expression);
    }

    @Override
    public void visit(BLangSelectClause selectClause) {
        analyzeExpr(selectClause.expression);
    }

    @Override
    public void visit(BLangDoClause doClause) {
        analyzeNode(doClause.body, env);
    }

    // Flags `is` checks that are statically always true (unnecessary condition) or
    // can never be true (incompatible check), unless an indirect intersection of
    // union/finite members makes the test meaningful.
    @Override
    public void visit(BLangTypeTestExpr typeTestExpr) {
        analyzeNode(typeTestExpr.expr, env);
        if (typeTestExpr.typeNode.type == symTable.semanticError || typeTestExpr.expr.type == symTable.semanticError) {
            return;
        }
        if (types.isAssignable(typeTestExpr.expr.type, typeTestExpr.typeNode.type)) {
            dlog.error(typeTestExpr.pos, DiagnosticCode.UNNECESSARY_CONDITION);
            return;
        }
        if (!types.isAssignable(typeTestExpr.typeNode.type, typeTestExpr.expr.type) &&
                !indirectIntersectionExists(typeTestExpr.expr, typeTestExpr.typeNode.type)) {
            dlog.error(typeTestExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPE_CHECK, typeTestExpr.expr.type,
                    typeTestExpr.typeNode.type);
        }
    }

    @Override
    public void visit(BLangAnnotAccessExpr annotAccessExpr) {
        analyzeExpr(annotAccessExpr.expr);
        BAnnotationSymbol annotationSymbol = annotAccessExpr.annotationSymbol;
        // Warn when a deprecated annotation is accessed.
        if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
            dlog.warning(annotAccessExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, annotationSymbol);
        }
    }

    // True when some member of a union/finite type on either side of the test is
    // assignable to the other side, making the type test potentially meaningful.
    private boolean indirectIntersectionExists(BLangExpression expression, BType testType) {
        BType expressionType = expression.type;
        switch (expressionType.tag) {
            case TypeTags.UNION:
                if (types.getTypeForUnionTypeMembersAssignableToType((BUnionType) expressionType, testType) !=
                        symTable.semanticError) {
                    return true;
                }
                break;
            case TypeTags.FINITE:
                if (types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) expressionType, testType) !=
                        symTable.semanticError) {
                    return true;
                }
        }
        switch (testType.tag) {
            case TypeTags.UNION:
                return types.getTypeForUnionTypeMembersAssignableToType((BUnionType) testType, expressionType) !=
                        symTable.semanticError;
            case TypeTags.FINITE:
                return types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) testType, expressionType) !=
                        symTable.semanticError;
        }
        return false;
    }

    // Analyzes an expression, maintaining the parent chain and resetting the JSON
    // context flag once the node has been visited.
    private <E extends BLangExpression> void analyzeExpr(E node) {
        if (node == null) {
            return;
        }
        BLangNode myParent = parent;
        node.parent = parent;
        parent = node;
        node.accept(this);
        this.isJSONContext = false;
        parent = myParent;
        checkAccess(node);
    }

    // Variant of analyzeExpr that runs the visit under the given symbol environment.
    private <E extends BLangExpression> void analyzeExpr(E node, SymbolEnv env) {
        if (node == null) {
            return;
        }
        SymbolEnv prevEnv = this.env;
        this.env = env;
        BLangNode myParent = parent;
        node.parent = parent;
        parent = node;
        node.accept(this);
        this.isJSONContext = false;
        parent = myParent;
        checkAccess(node);
        this.env = prevEnv;
    }

    @Override
    public void visit(BLangConstant constant) {
        analyzeTypeNode(constant.typeNode, env);
        analyzeNode(constant.expr, env);
        analyzeExportableTypeRef(constant.symbol, constant.symbol.type.tsymbol, false, constant.pos);
        constant.annAttachments.forEach(annotationAttachment -> annotationAttachment.accept(this));
    }

    /**
     * This method checks for private symbols being accessed or used outside of package and|or private symbols being
     * used in public fields of objects/records and will fail those occurrences.
     *
     * @param node expression node to analyze
     */
    private <E extends BLangExpression> void checkAccess(E node) {
        if (node.type != null) {
            checkAccessSymbol(node.type.tsymbol, node.pos);
        }
        // For invocations, also check the invoked symbol itself.
        if (node.getKind() == NodeKind.INVOCATION) {
            BLangInvocation bLangInvocation = (BLangInvocation) node;
            checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos);
        }
    }

    // Errors when a non-public symbol from another package is referenced.
    private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) {
        if (symbol == null) {
            return;
        }
        if (env.enclPkg.symbol.pkgID != symbol.pkgID && !Symbols.isPublic(symbol)) {
            dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
        }
    }

    private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) {
        for (int i = 0; i < nodeList.size(); i++) {
            analyzeExpr(nodeList.get(i));
        }
    }

    private void initNewWorkerActionSystem() {
        this.workerActionSystemStack.push(new WorkerActionSystem());
    }

    // Validates worker interactions for the completed system, unless errors were
    // already recorded while building it.
    private void finalizeCurrentWorkerActionSystem() {
        WorkerActionSystem was = this.workerActionSystemStack.pop();
        if (!was.hasErrors) {
            this.validateWorkerInteractions(was);
        }
    }

    private static boolean isWorkerSend(BLangNode action) {
        return action.getKind() == NodeKind.WORKER_SEND;
    }

    private static boolean isWorkerSyncSend(BLangNode action) {
        return action.getKind() == NodeKind.WORKER_SYNC_SEND;
    }

    // Returns the counterpart worker name referenced by a send/sync-send/receive action.
    private String extractWorkerId(BLangNode action) {
        if (isWorkerSend(action)) {
            return ((BLangWorkerSend) action).workerIdentifier.value;
        } else if (isWorkerSyncSend(action)) {
            return ((BLangWorkerSyncSendExpr) action).workerIdentifier.value;
        } else {
            return ((BLangWorkerReceive) action).workerIdentifier.value;
        }
    }

    // Runs the worker state machines, repeatedly matching each pending send with the
    // corresponding receive in the target worker, until no machine can make progress.
    // Machines left unfinished indicate an invalid (e.g. deadlocking) interaction.
    private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) {
        BLangNode currentAction;
        boolean systemRunning;
        do {
            systemRunning = false;
            for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
                if (worker.done()) {
                    continue;
                }
                currentAction = worker.currentAction();
                if (!isWorkerSend(currentAction) && !isWorkerSyncSend(currentAction)) {
                    continue;
                }
                WorkerActionStateMachine otherSM = workerActionSystem.find(this.extractWorkerId(currentAction));
                if (otherSM == null || !otherSM.currentIsReceive(worker.workerId)) {
                    continue;
                }
                BLangWorkerReceive receive = (BLangWorkerReceive) otherSM.currentAction();
                if (isWorkerSyncSend(currentAction)) {
                    this.validateWorkerActionParameters((BLangWorkerSyncSendExpr) currentAction, receive);
                } else {
                    this.validateWorkerActionParameters((BLangWorkerSend) currentAction, receive);
                }
                otherSM.next();
                worker.next();
                systemRunning = true;
                // Record the matched channel on both endpoints.
                String channelName = generateChannelName(worker.workerId, otherSM.workerId);
                otherSM.node.sendsToThis.add(channelName);
                worker.node.sendsToThis.add(channelName);
            }
        } while (systemRunning);
        if (!workerActionSystem.everyoneDone()) {
            this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
        }
    }

    private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
        this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION,
                workerActionSystem.toString());
    }

    // Type-checks a worker send against its matching receive and wires the pair together.
    private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
        types.checkType(receive, send.type, receive.type);
        addImplicitCast(send.type, receive);
        NodeKind kind = receive.parent.getKind();
        if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR) {
            // Re-run type checking on the wrapping trap/check expression.
            typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
        }
        receive.sendExpression = send.expr;
    }

    private void
validateWorkerActionParameters(BLangWorkerSyncSendExpr send, BLangWorkerReceive receive) {
        send.receive = receive;
        NodeKind parentNodeKind = send.parent.getKind();
        if (parentNodeKind == NodeKind.VARIABLE) {
            BLangSimpleVariable variable = (BLangSimpleVariable) send.parent;
            if (variable.isDeclaredWithVar) {
                // `var`-declared targets infer their type from the matching send error.
                variable.type = variable.symbol.type = send.expectedType = receive.matchingSendsError;
            }
        } else if (parentNodeKind == NodeKind.ASSIGNMENT) {
            BLangAssignment assignment = (BLangAssignment) send.parent;
            if (assignment.varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                BSymbol varSymbol = ((BLangSimpleVarRef) assignment.varRef).symbol;
                if (varSymbol != null) {
                    send.expectedType = varSymbol.type;
                }
            }
        }
        if (receive.matchingSendsError != symTable.nilType && parentNodeKind == NodeKind.EXPRESSION_STATEMENT) {
            // A sync send whose result can be an error must not be discarded.
            dlog.error(send.pos, DiagnosticCode.ASSIGNMENT_REQUIRED);
        } else {
            types.checkType(send.pos, receive.matchingSendsError, send.expectedType,
                    DiagnosticCode.INCOMPATIBLE_TYPES);
        }
        types.checkType(receive, send.type, receive.type);
        addImplicitCast(send.type, receive);
        NodeKind kind = receive.parent.getKind();
        if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR) {
            typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
        }
        receive.sendExpression = send;
    }

    private void addImplicitCast(BType actualType, BLangWorkerReceive receive) {
        if (receive.type != null && receive.type != symTable.semanticError) {
            types.setImplicitCastExpr(receive, actualType, receive.type);
            receive.type = actualType;
        }
    }

    private boolean checkNextBreakValidityInTransaction() {
        return !this.loopWithintransactionCheckStack.peek() && transactionCount > 0;
    }

    private boolean checkReturnValidityInTransaction() {
        return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek())
                && transactionCount > 0;
    }

    private boolean isValidTransactionBlock() {
        return !(this.withinRetryBlock || this.withinAbortedBlock || this.withinCommittedBlock);
    }

    // Checks the `main` function's signature: public, anydata parameters, error?/nil return.
    private void validateMainFunction(BLangFunction funcNode) {
        if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
            return;
        }
        if (!Symbols.isPublic(funcNode.symbol)) {
            this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC);
        }
        funcNode.requiredParams.forEach(param -> {
            if (!param.type.isAnydata()) {
                this.dlog.error(param.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA, param.type);
            }
        });
        if (funcNode.restParam != null && !funcNode.restParam.type.isAnydata()) {
            this.dlog.error(funcNode.restParam.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA,
                    funcNode.restParam.type);
        }
        types.validateErrorOrNilReturn(funcNode, DiagnosticCode.MAIN_RETURN_SHOULD_BE_ERROR_OR_NIL);
    }

    // Checks the module `init` function: non-public, no parameters, error?/nil return.
    private void validateModuleInitFunction(BLangFunction funcNode) {
        if (funcNode.attachedFunction || !Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcNode.name.value)) {
            return;
        }
        if (Symbols.isPublic(funcNode.symbol)) {
            this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_BE_PUBLIC);
        }
        if (!funcNode.requiredParams.isEmpty() || funcNode.restParam != null) {
            this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_HAVE_PARAMS);
        }
        types.validateErrorOrNilReturn(funcNode, DiagnosticCode.MODULE_INIT_RETURN_SHOULD_BE_ERROR_OR_NIL);
    }

    // True when the current visit context, or any of the given types, is a JSON context.
    private boolean getIsJSONContext(BType... arg) {
        if (this.isJSONContext) {
            return true;
        }
        for (BType type : arg) {
            if (types.isJSONContext(type)) {
                return true;
            }
        }
        return false;
    }

    // Extracts the error component of a type: the type itself when it is an error;
    // for unions, a union built from the error members; otherwise semanticError.
    private BType getErrorTypes(BType bType) {
        BType errorType = symTable.semanticError;
        int tag = bType.tag;
        if (tag == TypeTags.ERROR) {
            errorType = bType;
        } else if (tag == TypeTags.UNION) {
            LinkedHashSet<BType> errTypes = new LinkedHashSet<>();
            Set<BType> memTypes = ((BUnionType) bType).getMemberTypes();
            for (BType memType : memTypes) {
                if (memType.tag == TypeTags.ERROR) {
                    errTypes.add(memType);
                }
            }
            errorType = errTypes.size() == 1 ? errTypes.iterator().next() : BUnionType.create(null, errTypes);
        }
        return errorType;
    }

    /**
     * This class contains the state machines for a set of workers.
     */
    private static class WorkerActionSystem {

        public List<WorkerActionStateMachine> finshedWorkers = new ArrayList<>();
        // Machines still being built; moved to finshedWorkers when their worker ends.
        private Stack<WorkerActionStateMachine> workerActionStateMachines = new Stack<>();
        private boolean hasErrors = false;

        public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos, BLangFunction node) {
            workerActionStateMachines.push(new WorkerActionStateMachine(pos, workerId, node));
        }

        public void endWorkerActionStateMachine() {
            finshedWorkers.add(workerActionStateMachines.pop());
        }

        public void addWorkerAction(BLangNode action) {
            this.workerActionStateMachines.peek().actions.add(action);
        }

        public WorkerActionStateMachine find(String workerId) {
            for (WorkerActionStateMachine worker : this.finshedWorkers) {
                if (worker.workerId.equals(workerId)) {
                    return worker;
                }
            }
            throw new AssertionError("Reference to non existing worker " + workerId);
        }

        public boolean everyoneDone() {
            return this.finshedWorkers.stream().allMatch(WorkerActionStateMachine::done);
        }

        public DiagnosticPos getRootPosition() {
            return this.finshedWorkers.iterator().next().pos;
        }

        @Override
        public String toString() {
            return this.finshedWorkers.toString();
        }

        public String currentWorkerId() {
            return workerActionStateMachines.peek().workerId;
        }
    }

    /**
     * This class represents a state machine to maintain the state of the send/receive
     * actions of a worker.
     */
    private static class WorkerActionStateMachine {

        private static final String WORKER_SM_FINISHED = "FINISHED";

        // Index of the next unmatched action in `actions`.
        public int currentState;
        public List<BLangNode> actions = new ArrayList<>();
        public DiagnosticPos pos;
        public String workerId;
        public BLangFunction node;

        public WorkerActionStateMachine(DiagnosticPos pos, String workerId, BLangFunction node) {
            this.pos = pos;
            this.workerId = workerId;
            this.node = node;
        }

        public boolean done() {
            return this.actions.size() == this.currentState;
        }

        public BLangNode currentAction() {
            return this.actions.get(this.currentState);
        }

        // True when the pending action is a receive from the given source worker.
        public boolean currentIsReceive(String sourceWorkerId) {
            if (this.done()) {
                return false;
            }
            BLangNode action = this.currentAction();
            return !isWorkerSend(action) && !isWorkerSyncSend(action)
                    && ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
        }

        public void next() {
            this.currentState++;
        }

        @Override
        public String toString() {
            if (this.done()) {
                return WORKER_SM_FINISHED;
            } else {
                BLangNode action = this.currentAction();
                if (isWorkerSend(action)) {
                    return ((BLangWorkerSend) action).toActionString();
                } else if (isWorkerSyncSend(action)) {
                    return ((BLangWorkerSyncSendExpr) action).toActionString();
                } else {
                    return ((BLangWorkerReceive) action).toActionString();
                }
            }
        }
    }

    // Errors when an experimental construct is used without the experimental flag.
    private void checkExperimentalFeatureValidity(ExperimentalFeatures constructName, DiagnosticPos pos) {
        if (enableExperimentalFeatures) {
            return;
        }
        dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, constructName.value);
    }

    public static String generateChannelName(String source, String target) {
        return source + "->" + target;
    }

    /**
     * Experimental feature list for JBallerina 1.0.0.
     *
     * @since JBallerina 1.0.0
     */
    private enum ExperimentalFeatures {
        TRANSACTIONS("transaction"),
        LOCK("lock"),
        XML_ACCESS("xml access expression"),
        XML_ATTRIBUTES_ACCESS("xml attribute expression"),
        ;
        private String value;

        private ExperimentalFeatures(String value) {
            this.value = value;
        }

        @Override
        public String toString() {
            return value;
        }
    }
}
class CodeAnalyzer extends BLangNodeVisitor {

    private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY = new CompilerContext.Key<>();
    private static final String NULL_LITERAL = "null";

    private final SymbolResolver symResolver;
    private int loopCount;
    private int transactionCount;
    // True when the statement just analyzed guarantees a return from the invokable.
    private boolean statementReturns;
    // True when the statement just analyzed must be the last in its block (abort/retry/...).
    private boolean lastStatement;
    private boolean withinRetryBlock;
    private boolean withinLockBlock;
    private int workerCount;
    private SymbolTable symTable;
    private Types types;
    private BLangDiagnosticLogHelper dlog;
    private TypeChecker typeChecker;
    private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
    private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>();
    private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>();
    private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>();
    private BLangNode parent;
    private Names names;
    private SymbolEnv env;
    private final Stack<LinkedHashSet<BType>> returnTypes = new Stack<>();
    private boolean withinAbortedBlock;
    private boolean withinCommittedBlock;
    private boolean isJSONContext;
    private boolean enableExperimentalFeatures;

    public static CodeAnalyzer getInstance(CompilerContext context) {
        CodeAnalyzer codeGenerator = context.get(CODE_ANALYZER_KEY);
        if (codeGenerator == null) {
            codeGenerator = new CodeAnalyzer(context);
        }
        return codeGenerator;
    }

    public CodeAnalyzer(CompilerContext context) {
        context.put(CODE_ANALYZER_KEY, this);
        this.symTable = SymbolTable.getInstance(context);
        this.types = Types.getInstance(context);
        this.dlog = BLangDiagnosticLogHelper.getInstance(context);
        this.typeChecker = TypeChecker.getInstance(context);
        this.names = Names.getInstance(context);
        this.symResolver = SymbolResolver.getInstance(context);
        this.enableExperimentalFeatures = Boolean.parseBoolean(
                CompilerOptions.getInstance(context).get(CompilerOptionName.EXPERIMENTAL_FEATURES_ENABLED));
    }

    private void resetFunction() {
        this.resetStatementReturns();
    }

    private void resetStatementReturns() {
        this.statementReturns = false;
    }

    private void resetLastStatement() {
        this.lastStatement = false;
    }

    public BLangPackage analyze(BLangPackage pkgNode) {
        pkgNode.accept(this);
        return pkgNode;
    }

    @Override
    public void visit(BLangPackage pkgNode) {
        // Skip packages already analyzed in this phase.
        if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
            return;
        }
        parent = pkgNode;
        SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
        analyzeTopLevelNodes(pkgNode, pkgEnv);
        pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
    }

    private void analyzeTopLevelNodes(BLangPackage pkgNode, SymbolEnv pkgEnv) {
        pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv));
        pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
        parent = null;
    }

    // Analyzes a node under the given environment, maintaining the parent chain.
    private void analyzeNode(BLangNode node, SymbolEnv env) {
        SymbolEnv prevEnv = this.env;
        this.env = env;
        BLangNode myParent = parent;
        node.parent = parent;
        parent = node;
        node.accept(this);
        parent = myParent;
        this.env = prevEnv;
    }

    private void analyzeTypeNode(BLangType node, SymbolEnv env) {
        if (node == null) {
            return;
        }
        analyzeNode(node, env);
    }

    @Override
    public void visit(BLangCompilationUnit compUnitNode) {
        compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env));
    }

    public void visit(BLangTypeDefinition typeDefinition) {
        analyzeTypeNode(typeDefinition.typeNode, this.env);
        typeDefinition.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env));
    }

    @Override
    public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
        analyzeNode(bLangTupleVariableDef.var, this.env);
    }

    @Override
    public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
        analyzeNode(bLangRecordVariableDef.var, this.env);
    }

    @Override
    public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
        analyzeNode(bLangErrorVariableDef.errorVariable, this.env);
    }

    @Override
    public void visit(BLangFunction funcNode) {
        boolean isLambda = funcNode.flagSet.contains(Flag.LAMBDA);
        if (isLambda) {
            // Lambdas are analyzed from their enclosing context (see BLangLambdaFunction).
            return;
        }
        validateParams(funcNode);
        // Public functions must not expose non-exportable types.
        if (Symbols.isPublic(funcNode.symbol)) {
            funcNode.symbol.params.forEach(symbol -> analyzeExportableTypeRef(funcNode.symbol, symbol.type.tsymbol,
                    true, funcNode.pos));
            if (funcNode.symbol.restParam != null) {
                analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.restParam.type.tsymbol, true,
                        funcNode.restParam.pos);
            }
            analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.retType.tsymbol, true,
                    funcNode.returnTypeNode.pos);
        }
        this.validateMainFunction(funcNode);
        this.validateModuleInitFunction(funcNode);
        try {
            // The function body itself runs as the default worker of a fresh action system.
            this.initNewWorkerActionSystem();
            this.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME, funcNode.pos,
                    funcNode);
            this.visitFunction(funcNode);
            this.workerActionSystemStack.peek().endWorkerActionStateMachine();
        } finally {
            this.finalizeCurrentWorkerActionSystem();
        }
        funcNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env));
    }

    private void validateParams(BLangFunction funcNode) {
        for (BLangSimpleVariable parameter : funcNode.requiredParams) {
            analyzeNode(parameter, env);
        }
        if (funcNode.restParam != null) {
            analyzeNode(funcNode.restParam, env);
        }
    }

    private void visitFunction(BLangFunction funcNode) {
        SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
        this.returnWithintransactionCheckStack.push(true);
        this.doneWithintransactionCheckStack.push(true);
        this.returnTypes.push(new LinkedHashSet<>());
        this.resetFunction();
        if (Symbols.isNative(funcNode.symbol)) {
            // NOTE(review): this early return leaves the three stacks pushed above
            // unpopped — confirm this is intentional.
            return;
        }
        if (isPublicInvokableNode(funcNode)) {
            analyzeNode(funcNode.returnTypeNode, invokableEnv);
        }
        /* the body can be null in the case of Object type function declarations */
        if (funcNode.body != null) {
            analyzeNode(funcNode.body, invokableEnv);
            // Non-nilable-return functions must return on every path.
            boolean isNilableReturn = funcNode.symbol.type.getReturnType().isNullable();
            if (!isNilableReturn && !this.statementReturns) {
                this.dlog.error(funcNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN,
                        funcNode.getKind().toString().toLowerCase());
            }
        }
        this.returnTypes.pop();
        this.returnWithintransactionCheckStack.pop();
        this.doneWithintransactionCheckStack.pop();
    }

    private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
        return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind())
                || Symbols.isPublic(invNode.symbol.owner));
    }

    @Override
    public void visit(BLangBlockFunctionBody body) {
        final SymbolEnv blockEnv = SymbolEnv.createFuncBodyEnv(body, env);
        for (BLangStatement e : body.stmts) {
            analyzeNode(e, blockEnv);
        }
        this.resetLastStatement();
    }

    @Override
    public void visit(BLangExprFunctionBody body) {
        analyzeExpr(body.expr);
        // An expression body always yields a value, so the function "returns".
        this.statementReturns = true;
        this.resetLastStatement();
    }

    @Override
    public void visit(BLangExternalFunctionBody body) {
    }

    @Override
    public void visit(BLangForkJoin forkJoin) {
        if (forkJoin.workers.isEmpty()) {
            dlog.error(forkJoin.pos, DiagnosticCode.INVALID_FOR_JOIN_SYNTAX_EMPTY_FORK);
        }
    }

    @Override
    public void visit(BLangWorker worker) {
        /* ignore, remove later */
    }

    @Override
    public void visit(BLangEndpoint endpointNode) {
    }

    @Override
    public void visit(BLangTransaction transactionNode) {
        checkExperimentalFeatureValidity(ExperimentalFeatures.TRANSACTIONS, transactionNode.pos);
        this.checkStatementExecutionValidity(transactionNode);
        if (!isValidTransactionBlock()) {
            this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER);
            return;
        }
        this.loopWithintransactionCheckStack.push(false);
        this.returnWithintransactionCheckStack.push(false);
        this.doneWithintransactionCheckStack.push(false);
        this.transactionCount++;
        if (this.transactionCount > 1) {
            this.dlog.error(transactionNode.pos, DiagnosticCode.NESTED_TRANSACTIONS_ARE_INVALID);
        }
        analyzeNode(transactionNode.transactionBody, env);
        this.transactionCount--;
        this.resetLastStatement();
        // The onretry/aborted/committed handlers each run with their "within" flag set,
        // and the returns/last-statement state is reset after each handler body.
        if (transactionNode.onRetryBody != null) {
            this.withinRetryBlock = true;
            analyzeNode(transactionNode.onRetryBody, env);
            this.resetStatementReturns();
            this.resetLastStatement();
            this.withinRetryBlock = false;
        }
        if (transactionNode.abortedBody != null) {
            this.withinAbortedBlock = true;
            analyzeNode(transactionNode.abortedBody, env);
            this.resetStatementReturns();
            this.resetLastStatement();
            this.withinAbortedBlock = false;
        }
        if (transactionNode.committedBody != null) {
            this.withinCommittedBlock = true;
            analyzeNode(transactionNode.committedBody, env);
            this.resetStatementReturns();
            this.resetLastStatement();
            this.withinCommittedBlock = false;
        }
        this.returnWithintransactionCheckStack.pop();
        this.loopWithintransactionCheckStack.pop();
        this.doneWithintransactionCheckStack.pop();
        analyzeExpr(transactionNode.retryCount);
    }

    @Override
    public void visit(BLangAbort abortNode) {
        if (this.transactionCount == 0) {
            this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
            return;
        }
        this.lastStatement = true;
    }

    @Override
    public void visit(BLangRetry retryNode) {
        if (this.transactionCount == 0) {
            this.dlog.error(retryNode.pos, DiagnosticCode.RETRY_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
            return;
        }
        this.lastStatement = true;
    }

    // Reports unreachable code when a prior statement already returned or was
    // required to be the last statement in its block; resets the flag after reporting.
    private void checkUnreachableCode(BLangStatement stmt) {
        if (this.statementReturns) {
            this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
            this.resetStatementReturns();
        }
        if (lastStatement) {
            this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
            this.resetLastStatement();
        }
    }

    private void checkStatementExecutionValidity(BLangStatement stmt) {
        this.checkUnreachableCode(stmt);
    }

    @Override
    public void visit(BLangBlockStmt blockNode) {
        final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
        blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv));
        this.resetLastStatement();
    }

    @Override
    public void visit(BLangReturn returnStmt) {
        this.checkStatementExecutionValidity(returnStmt);
        if (checkReturnValidityInTransaction()) {
            this.dlog.error(returnStmt.pos,
DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.statementReturns = true; analyzeExpr(returnStmt.expr); this.returnTypes.peek().add(returnStmt.expr.type); } @Override public void visit(BLangIf ifStmt) { this.checkStatementExecutionValidity(ifStmt); analyzeNode(ifStmt.body, env); boolean ifStmtReturns = this.statementReturns; this.resetStatementReturns(); if (ifStmt.elseStmt != null) { analyzeNode(ifStmt.elseStmt, env); this.statementReturns = ifStmtReturns && this.statementReturns; } analyzeExpr(ifStmt.expr); } @Override public void visit(BLangMatch matchStmt) { analyzeExpr(matchStmt.expr); boolean staticLastPattern = false; if (!matchStmt.getStaticPatternClauses().isEmpty()) { staticLastPattern = analyzeStaticMatchPatterns(matchStmt); } boolean structuredLastPattern = false; if (!matchStmt.getStructuredPatternClauses().isEmpty()) { structuredLastPattern = analyzeStructuredMatchPatterns(matchStmt); } if (!matchStmt.getPatternClauses().isEmpty()) { analyzeEmptyMatchPatterns(matchStmt); analyzeMatchedPatterns(matchStmt, staticLastPattern, structuredLastPattern); } } @Override public void visit(BLangMatchStaticBindingPatternClause patternClause) { analyzeNode(patternClause.matchExpr, env); analyzeNode(patternClause.body, env); resetStatementReturns(); } @Override public void visit(BLangMatchStructuredBindingPatternClause patternClause) { analyzeNode(patternClause.matchExpr, env); analyzeNode(patternClause.body, env); resetStatementReturns(); } private void analyzeMatchedPatterns(BLangMatch matchStmt, boolean staticLastPattern, boolean structuredLastPattern) { if (staticLastPattern && structuredLastPattern) { dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CONTAINS_TWO_DEFAULT_PATTERNS); } if ((staticLastPattern && !hasErrorType(matchStmt.exprTypes)) || structuredLastPattern) { if (matchStmt.getPatternClauses().size() == 1) { dlog.error(matchStmt.getPatternClauses().get(0).pos, DiagnosticCode.MATCH_STMT_PATTERN_ALWAYS_MATCHES); } 
this.checkStatementExecutionValidity(matchStmt); boolean matchStmtReturns = true; for (BLangMatchBindingPatternClause patternClause : matchStmt.getPatternClauses()) { analyzeNode(patternClause.body, env); matchStmtReturns = matchStmtReturns && this.statementReturns; this.resetStatementReturns(); } this.statementReturns = matchStmtReturns; } } private boolean hasErrorType(List<BType> typeList) { return typeList.stream().anyMatch(t -> types.isAssignable(t, symTable.errorType)); } private boolean analyzeStructuredMatchPatterns(BLangMatch matchStmt) { if (matchStmt.exprTypes.isEmpty()) { return false; } for (BLangMatchStructuredBindingPatternClause patternClause : matchStmt.getStructuredPatternClauses()) { analyzeNode(patternClause, env); } return analyseStructuredBindingPatterns(matchStmt.getStructuredPatternClauses(), hasErrorType(matchStmt.exprTypes)); } /** * This method is used to check structured `var []`, `var {}` & static `[]`, `{}` match pattern. * * @param matchStmt the match statement containing structured & static match patterns. 
*/ private void analyzeEmptyMatchPatterns(BLangMatch matchStmt) { List<BLangMatchBindingPatternClause> emptyLists = new ArrayList<>(); List<BLangMatchBindingPatternClause> emptyRecords = new ArrayList<>(); for (BLangMatchBindingPatternClause pattern : matchStmt.patternClauses) { if (pattern.getKind() == NodeKind.MATCH_STATIC_PATTERN_CLAUSE) { BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) pattern; if (staticPattern.literal.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) { BLangListConstructorExpr listLiteral = (BLangListConstructorExpr) staticPattern.literal; if (listLiteral.exprs.isEmpty()) { emptyLists.add(pattern); } } else if (staticPattern.literal.getKind() == NodeKind.RECORD_LITERAL_EXPR) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) staticPattern.literal; if (recordLiteral.fields.isEmpty()) { emptyRecords.add(pattern); } } } else if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) { BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.TUPLE_VARIABLE) { BLangTupleVariable tupleVariable = (BLangTupleVariable) structuredPattern.bindingPatternVariable; if (tupleVariable.memberVariables.isEmpty() && tupleVariable.restVariable == null) { emptyLists.add(pattern); } } else if (structuredPattern.bindingPatternVariable.getKind() == NodeKind.RECORD_VARIABLE) { BLangRecordVariable recordVariable = (BLangRecordVariable) structuredPattern.bindingPatternVariable; if (recordVariable.variableList.isEmpty() && recordVariable.restParam == null) { emptyRecords.add(pattern); } } } } if (emptyLists.size() > 1) { for (int i = 1; i < emptyLists.size(); i++) { dlog.error(emptyLists.get(i).pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } if (emptyRecords.size() > 1) { for (int i = 1; i < emptyRecords.size(); i++) { dlog.error(emptyRecords.get(i).pos, 
DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); } } } /** * This method is used to check the isLike test in a static match pattern. * @param matchStmt the match statment containing static match patterns. */ private boolean analyzeStaticMatchPatterns(BLangMatch matchStmt) { if (matchStmt.exprTypes.isEmpty()) { return false; } List<BLangMatchStaticBindingPatternClause> matchedPatterns = new ArrayList<>(); for (BLangMatchStaticBindingPatternClause pattern : matchStmt.getStaticPatternClauses()) { analyzeNode(pattern, env); List<BType> matchedExpTypes = matchStmt.exprTypes .stream() .filter(exprType -> isValidStaticMatchPattern(exprType, pattern.literal)) .collect(Collectors.toList()); if (matchedExpTypes.isEmpty()) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN); continue; } this.isJSONContext = types.isJSONContext(matchStmt.expr.type); analyzeNode(pattern.literal, env); matchedPatterns.add(pattern); } if (matchedPatterns.isEmpty()) { return false; } return analyzeStaticPatterns(matchedPatterns, hasErrorType(matchStmt.exprTypes)); } private boolean analyzeStaticPatterns(List<BLangMatchStaticBindingPatternClause> matchedPatterns, boolean errorTypeInMatchExpr) { BLangMatchStaticBindingPatternClause finalPattern = matchedPatterns.get(matchedPatterns.size() - 1); if (finalPattern.literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) finalPattern.literal).variableName.value.equals(Names.IGNORE.value) && !errorTypeInMatchExpr) { finalPattern.isLastPattern = true; } for (int i = 0; i < matchedPatterns.size() - 1; i++) { BLangExpression precedingPattern = matchedPatterns.get(i).literal; for (int j = i + 1; j < matchedPatterns.size(); j++) { BLangExpression pattern = matchedPatterns.get(j).literal; if (checkLiteralSimilarity(precedingPattern, pattern)) { dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN); matchedPatterns.remove(j--); } } } return finalPattern.isLastPattern; } private boolean 
analyseStructuredBindingPatterns(List<BLangMatchStructuredBindingPatternClause> clauses,
                                 boolean errorTypeInMatchExpr) {
    // The match is exhaustive when the final clause is a bare `var x` binding with no type
    // guard — unless the matched expression can produce an error and the clause is the
    // wildcard `_` (which deliberately does not swallow errors).
    BLangMatchStructuredBindingPatternClause finalPattern = clauses.get(clauses.size() - 1);
    if (finalPattern.bindingPatternVariable.getKind() == NodeKind.VARIABLE
            && finalPattern.typeGuardExpr == null
            && !(errorTypeInMatchExpr && isWildcardMatchPattern(finalPattern))) {
        finalPattern.isLastPattern = true;
    }

    BLangMatchStructuredBindingPatternClause currentPattern;
    BLangMatchStructuredBindingPatternClause precedingPattern;
    // Pairwise-compare clauses: if an earlier clause matches at least the same shapes
    // (pattern similarity) AND its type guard subsumes the later clause's guard, the later
    // clause is unreachable. Report it and remove it; `j--` compensates for the removal so
    // the next element is not skipped.
    for (int i = 0; i < clauses.size(); i++) {
        precedingPattern = clauses.get(i);
        if (precedingPattern.typeGuardExpr != null) {
            analyzeExpr(precedingPattern.typeGuardExpr);
        }

        for (int j = i + 1; j < clauses.size(); j++) {
            currentPattern = clauses.get(j);
            BLangVariable precedingVar = precedingPattern.bindingPatternVariable;
            BLangVariable currentVar = currentPattern.bindingPatternVariable;

            if (checkStructuredPatternSimilarity(precedingVar, currentVar, errorTypeInMatchExpr)
                    && checkTypeGuardSimilarity(precedingPattern.typeGuardExpr,
                                                currentPattern.typeGuardExpr)) {
                dlog.error(currentVar.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
                clauses.remove(j--);
            }
        }
    }

    return finalPattern.isLastPattern;
}

// True when the clause's binding pattern is the `_` wildcard (ignored) variable.
private boolean isWildcardMatchPattern(BLangMatchStructuredBindingPatternClause finalPattern) {
    return ((BLangSimpleVariable) finalPattern.bindingPatternVariable).name.value.equals(Names.IGNORE.value);
}

/**
 * This method will check if two patterns are similar to each other.
 * Having similar patterns in the match block will result in unreachable pattern.
 *
 * @param precedingPattern pattern taken to compare similarity.
 * @param pattern the pattern that the precedingPattern is checked for similarity.
 * @return true if both patterns are similar.
*/ private boolean checkLiteralSimilarity(BLangExpression precedingPattern, BLangExpression pattern) { if (precedingPattern.getKind() == NodeKind.BINARY_EXPR) { BLangBinaryExpr precedingBinaryExpr = (BLangBinaryExpr) precedingPattern; BLangExpression precedingLhsExpr = precedingBinaryExpr.lhsExpr; BLangExpression precedingRhsExpr = precedingBinaryExpr.rhsExpr; return checkLiteralSimilarity(precedingLhsExpr, pattern) || checkLiteralSimilarity(precedingRhsExpr, pattern); } if (pattern.getKind() == NodeKind.BINARY_EXPR) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) pattern; BLangExpression lhsExpr = binaryExpr.lhsExpr; BLangExpression rhsExpr = binaryExpr.rhsExpr; return checkLiteralSimilarity(precedingPattern, lhsExpr) || checkLiteralSimilarity(precedingPattern, rhsExpr); } switch (precedingPattern.type.tag) { case TypeTags.MAP: if (pattern.type.tag == TypeTags.MAP) { BLangRecordLiteral precedingRecordLiteral = (BLangRecordLiteral) precedingPattern; Map<String, BLangExpression> recordLiteral = ((BLangRecordLiteral) pattern).fields .stream() .map(field -> (BLangRecordKeyValueField) field) .collect(Collectors.toMap( keyValuePair -> ((BLangSimpleVarRef) keyValuePair.key.expr).variableName.value, BLangRecordKeyValueField::getValue )); for (int i = 0; i < precedingRecordLiteral.fields.size(); i++) { BLangRecordKeyValueField bLangRecordKeyValue = (BLangRecordKeyValueField) precedingRecordLiteral.fields.get(i); String key = ((BLangSimpleVarRef) bLangRecordKeyValue.key.expr).variableName.value; if (!recordLiteral.containsKey(key)) { return false; } if (!checkLiteralSimilarity(bLangRecordKeyValue.valueExpr, recordLiteral.get(key))) { return false; } } return true; } return false; case TypeTags.TUPLE: if (pattern.type.tag == TypeTags.TUPLE) { BLangListConstructorExpr precedingTupleLiteral = (BLangListConstructorExpr) precedingPattern; BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) pattern; if (precedingTupleLiteral.exprs.size() != 
tupleLiteral.exprs.size()) { return false; } return IntStream.range(0, precedingTupleLiteral.exprs.size()) .allMatch(i -> checkLiteralSimilarity(precedingTupleLiteral.exprs.get(i), tupleLiteral.exprs.get(i))); } return false; case TypeTags.INT: case TypeTags.BYTE: case TypeTags.FLOAT: case TypeTags.DECIMAL: case TypeTags.STRING: case TypeTags.BOOLEAN: if (precedingPattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BConstantSymbol precedingPatternSym = (BConstantSymbol) ((BLangSimpleVarRef) precedingPattern).symbol; if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { if (!((BLangSimpleVarRef) pattern).variableName.value.equals(Names.IGNORE.value)) { BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol; return precedingPatternSym.value.equals(patternSym.value); } return false; } BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ? (BLangLiteral) ((BLangGroupExpr) pattern).expression : (BLangLiteral) pattern; return (precedingPatternSym.value.equals(literal.value)); } if (types.isValueType(pattern.type)) { BLangLiteral precedingLiteral = precedingPattern.getKind() == NodeKind.GROUP_EXPR ? (BLangLiteral) ((BLangGroupExpr) precedingPattern).expression : (BLangLiteral) precedingPattern; if (pattern.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { if (pattern.type.tag != TypeTags.NONE) { BConstantSymbol patternSym = (BConstantSymbol) ((BLangSimpleVarRef) pattern).symbol; return patternSym.value.equals(precedingLiteral.value); } return false; } BLangLiteral literal = pattern.getKind() == NodeKind.GROUP_EXPR ? (BLangLiteral) ((BLangGroupExpr) pattern).expression : (BLangLiteral) pattern; return (precedingLiteral.value.equals(literal.value)); } return false; case TypeTags.ANY: if (pattern.type.tag == TypeTags.ERROR) { return false; } return true; default: return false; } } /** * This method will determine if the type guard of the preceding pattern will result in the current pattern * being unreachable. 
 *
 * @param precedingGuard type guard of the preceding structured pattern
 * @param currentGuard   type guard of the current structured pattern
 * @return true if the current pattern is unreachable due to the type guard of the preceding pattern
 */
private boolean checkTypeGuardSimilarity(BLangExpression precedingGuard, BLangExpression currentGuard) {
    if (precedingGuard != null && currentGuard != null) {
        // Only the `varRef is Type` form is compared: both guards must be type-test
        // expressions over a simple variable reference. Two such guards are "similar" when
        // they test the same variable name against types with the same type tag.
        if (precedingGuard.getKind() == NodeKind.TYPE_TEST_EXPR
                && currentGuard.getKind() == NodeKind.TYPE_TEST_EXPR
                && ((BLangTypeTestExpr) precedingGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangTypeTestExpr) currentGuard).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            BLangTypeTestExpr precedingTypeTest = (BLangTypeTestExpr) precedingGuard;
            BLangTypeTestExpr currentTypeTest = (BLangTypeTestExpr) currentGuard;
            return ((BLangSimpleVarRef) precedingTypeTest.expr).variableName.toString().equals(
                    ((BLangSimpleVarRef) currentTypeTest.expr).variableName.toString())
                    && precedingTypeTest.typeNode.type.tag == currentTypeTest.typeNode.type.tag;
        }
        // Guards of any other shape are conservatively treated as dissimilar.
        return false;
    }
    // One (or both) guard is absent: the current clause is shadowed unless the preceding
    // clause is guarded while the current one is not.
    return currentGuard != null || precedingGuard == null;
}

/**
 * This method will determine if the current structured pattern will be unreachable due to a preceding pattern.
* * @param precedingVar the structured pattern that appears on top * @param var the structured pattern that appears after the precedingVar * @param errorTypeInMatchExpr * @return true if the the current pattern is unreachable due to the preceding pattern */ private boolean checkStructuredPatternSimilarity(BLangVariable precedingVar, BLangVariable var, boolean errorTypeInMatchExpr) { if (precedingVar.type.tag == TypeTags.SEMANTIC_ERROR || var.type.tag == TypeTags.SEMANTIC_ERROR) { return false; } if (precedingVar.getKind() == NodeKind.RECORD_VARIABLE && var.getKind() == NodeKind.RECORD_VARIABLE) { BLangRecordVariable precedingRecVar = (BLangRecordVariable) precedingVar; BLangRecordVariable recVar = (BLangRecordVariable) var; Map<String, BLangVariable> recVarAsMap = recVar.variableList.stream() .collect(Collectors.toMap( keyValue -> keyValue.key.value, keyValue -> keyValue.valueBindingPattern )); if (precedingRecVar.variableList.size() > recVar.variableList.size()) { return false; } for (int i = 0; i < precedingRecVar.variableList.size(); i++) { BLangRecordVariableKeyValue precedingKeyValue = precedingRecVar.variableList.get(i); if (!recVarAsMap.containsKey(precedingKeyValue.key.value)) { return false; } if (!checkStructuredPatternSimilarity( precedingKeyValue.valueBindingPattern, recVarAsMap.get(precedingKeyValue.key.value), errorTypeInMatchExpr)) { return false; } } if (precedingRecVar.hasRestParam() && recVar.hasRestParam()) { return true; } return precedingRecVar.hasRestParam() || !recVar.hasRestParam(); } if (precedingVar.getKind() == NodeKind.TUPLE_VARIABLE && var.getKind() == NodeKind.TUPLE_VARIABLE) { List<BLangVariable> precedingMemberVars = ((BLangTupleVariable) precedingVar).memberVariables; BLangVariable precedingRestVar = ((BLangTupleVariable) precedingVar).restVariable; List<BLangVariable> memberVars = ((BLangTupleVariable) var).memberVariables; BLangVariable memberRestVar = ((BLangTupleVariable) var).restVariable; if (precedingRestVar != null && 
memberRestVar != null) { return true; } if (precedingRestVar == null && memberRestVar == null && precedingMemberVars.size() != memberVars.size()) { return false; } if (precedingRestVar != null && precedingMemberVars.size() > memberVars.size()) { return false; } if (memberRestVar != null) { return false; } for (int i = 0; i < memberVars.size(); i++) { if (!checkStructuredPatternSimilarity(precedingMemberVars.get(i), memberVars.get(i), errorTypeInMatchExpr)) { return false; } } return true; } if (precedingVar.getKind() == NodeKind.ERROR_VARIABLE && var.getKind() == NodeKind.ERROR_VARIABLE) { BLangErrorVariable precedingErrVar = (BLangErrorVariable) precedingVar; BLangErrorVariable errVar = (BLangErrorVariable) var; if (precedingErrVar.restDetail != null && isDirectErrorBindingPattern(precedingErrVar)) { return true; } if (errVar.restDetail != null) { return false; } if (precedingErrVar.detail != null && errVar.detail != null) { Map<String, BLangVariable> preDetails = precedingErrVar.detail.stream() .collect(Collectors.toMap(entry -> entry.key.value, entry -> entry.valueBindingPattern)); for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : errVar.detail) { BLangVariable correspondingCurDetail = preDetails.get(detailEntry.key.value); if (correspondingCurDetail == null) { return false; } boolean similar = checkStructuredPatternSimilarity(detailEntry.valueBindingPattern, correspondingCurDetail, errorTypeInMatchExpr); if (!similar) { return false; } } } return true; } if (precedingVar.getKind() == NodeKind.VARIABLE && ((BLangSimpleVariable) precedingVar).name.value.equals(Names.IGNORE.value) && var.getKind() == NodeKind.ERROR_VARIABLE) { return false; } return precedingVar.getKind() == NodeKind.VARIABLE; } private boolean isDirectErrorBindingPattern(BLangErrorVariable precedingErrVar) { return precedingErrVar.typeNode == null; } /** * This method will check if the static match pattern is valid based on the matching type. 
* * @param matchType type of the expression being matched. * @param literal the static match pattern. * @return true if the pattern is valid, else false. */ private boolean isValidStaticMatchPattern(BType matchType, BLangExpression literal) { if (literal.type.tag == TypeTags.NONE) { return true; } if (types.isSameType(literal.type, matchType)) { return true; } if (TypeTags.ANY == literal.type.tag) { return true; } switch (matchType.tag) { case TypeTags.ANY: case TypeTags.ANYDATA: case TypeTags.JSON: return true; case TypeTags.UNION: BUnionType unionMatchType = (BUnionType) matchType; return unionMatchType.getMemberTypes() .stream() .anyMatch(memberMatchType -> isValidStaticMatchPattern(memberMatchType, literal)); case TypeTags.TUPLE: if (literal.type.tag == TypeTags.TUPLE) { BLangListConstructorExpr tupleLiteral = (BLangListConstructorExpr) literal; BTupleType literalTupleType = (BTupleType) literal.type; BTupleType matchTupleType = (BTupleType) matchType; if (literalTupleType.tupleTypes.size() != matchTupleType.tupleTypes.size()) { return false; } return IntStream.range(0, literalTupleType.tupleTypes.size()) .allMatch(i -> isValidStaticMatchPattern(matchTupleType.tupleTypes.get(i), tupleLiteral.exprs.get(i))); } break; case TypeTags.MAP: if (literal.type.tag == TypeTags.MAP) { BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal; return IntStream.range(0, mapLiteral.fields.size()) .allMatch(i -> isValidStaticMatchPattern(((BMapType) matchType).constraint, ((BLangRecordKeyValueField) mapLiteral.fields.get(i)).valueExpr)); } break; case TypeTags.RECORD: if (literal.type.tag == TypeTags.MAP) { BLangRecordLiteral mapLiteral = (BLangRecordLiteral) literal; BRecordType recordMatchType = (BRecordType) matchType; Map<String, BType> recordFields = recordMatchType.fields .stream() .collect(Collectors.toMap( field -> field.getName().getValue(), BField::getType )); for (RecordLiteralNode.RecordField field : mapLiteral.fields) { BLangRecordKeyValueField literalKeyValue 
= (BLangRecordKeyValueField) field; String literalKeyName; NodeKind nodeKind = literalKeyValue.key.expr.getKind(); if (nodeKind == NodeKind.SIMPLE_VARIABLE_REF) { literalKeyName = ((BLangSimpleVarRef) literalKeyValue.key.expr).variableName.value; } else if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { literalKeyName = ((BLangLiteral) literalKeyValue.key.expr).value.toString(); } else { return false; } if (recordFields.containsKey(literalKeyName)) { if (!isValidStaticMatchPattern( recordFields.get(literalKeyName), literalKeyValue.valueExpr)) { return false; } } else if (recordMatchType.sealed || !isValidStaticMatchPattern(recordMatchType.restFieldType, literalKeyValue.valueExpr)) { return false; } } return true; } break; case TypeTags.BYTE: if (literal.type.tag == TypeTags.INT) { return true; } break; case TypeTags.FINITE: if (literal.getKind() == NodeKind.LITERAL || literal.getKind() == NodeKind.NUMERIC_LITERAL) { return types.isAssignableToFiniteType(matchType, (BLangLiteral) literal); } if (literal.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) literal).symbol.getKind() == SymbolKind.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) ((BLangSimpleVarRef) literal).symbol; return types.isAssignableToFiniteType(matchType, (BLangLiteral) ((BFiniteType) constSymbol.type).getValueSpace().iterator().next()); } break; } return false; } @Override public void visit(BLangForeach foreach) { this.loopWithintransactionCheckStack.push(true); boolean statementReturns = this.statementReturns; this.checkStatementExecutionValidity(foreach); this.loopCount++; analyzeNode(foreach.body, env); this.loopCount--; this.statementReturns = statementReturns; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(foreach.collection); } @Override public void visit(BLangWhile whileNode) { this.loopWithintransactionCheckStack.push(true); boolean statementReturns = this.statementReturns; 
this.checkStatementExecutionValidity(whileNode); this.loopCount++; analyzeNode(whileNode.body, env); this.loopCount--; this.statementReturns = statementReturns; this.resetLastStatement(); this.loopWithintransactionCheckStack.pop(); analyzeExpr(whileNode.expr); } @Override public void visit(BLangLock lockNode) { this.checkStatementExecutionValidity(lockNode); boolean previousWithinLockBlock = this.withinLockBlock; this.withinLockBlock = true; lockNode.body.stmts.forEach(e -> analyzeNode(e, env)); this.withinLockBlock = previousWithinLockBlock; } @Override public void visit(BLangContinue continueNode) { this.checkStatementExecutionValidity(continueNode); if (this.loopCount == 0) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP); return; } if (checkNextBreakValidityInTransaction()) { this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION); return; } this.lastStatement = true; } public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); if (pkgEnv == null) { return; } analyzeNode(pkgEnv.node, env); } public void visit(BLangXMLNS xmlnsNode) { /* ignore */ } public void visit(BLangService serviceNode) { } public void visit(BLangResource resourceNode) { throw new RuntimeException("Deprecated lang feature"); } private void analyzeExportableTypeRef(BSymbol owner, BTypeSymbol symbol, boolean inFuncSignature, DiagnosticPos pos) { if (!inFuncSignature && Symbols.isFlagOn(owner.flags, Flags.ANONYMOUS)) { return; } if (Symbols.isPublic(owner)) { checkForExportableType(symbol, pos); } } private void checkForExportableType(BTypeSymbol symbol, DiagnosticPos pos) { if (symbol == null || symbol.type == null || Symbols.isFlagOn(symbol.flags, Flags.TYPE_PARAM)) { return; } switch (symbol.type.tag) { case TypeTags.ARRAY: checkForExportableType(((BArrayType) symbol.type).eType.tsymbol, pos); return; case 
TypeTags.TUPLE: BTupleType tupleType = (BTupleType) symbol.type; tupleType.tupleTypes.forEach(t -> checkForExportableType(t.tsymbol, pos)); if (tupleType.restType != null) { checkForExportableType(tupleType.restType.tsymbol, pos); } return; case TypeTags.MAP: checkForExportableType(((BMapType) symbol.type).constraint.tsymbol, pos); return; case TypeTags.RECORD: if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) { BRecordType recordType = (BRecordType) symbol.type; recordType.fields.forEach(f -> checkForExportableType(f.type.tsymbol, pos)); if (recordType.restFieldType != null) { checkForExportableType(recordType.restFieldType.tsymbol, pos); } return; } break; case TypeTags.TABLE: BTableType tableType = (BTableType) symbol.type; if (tableType.constraint != null) { checkForExportableType(tableType.constraint.tsymbol, pos); } return; case TypeTags.STREAM: BStreamType streamType = (BStreamType) symbol.type; if (streamType.constraint != null) { checkForExportableType(streamType.constraint.tsymbol, pos); } return; case TypeTags.INVOKABLE: BInvokableType invokableType = (BInvokableType) symbol.type; if (invokableType.paramTypes != null) { for (BType paramType : invokableType.paramTypes) { checkForExportableType(paramType.tsymbol, pos); } } if (invokableType.restType != null) { checkForExportableType(invokableType.restType.tsymbol, pos); } checkForExportableType(invokableType.retType.tsymbol, pos); return; } if (!Symbols.isPublic(symbol)) { dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name); } } public void visit(BLangLetExpression letExpression) { int ownerSymTag = this.env.scope.owner.tag; if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD) { dlog.error(letExpression.pos, DiagnosticCode.LET_EXPRESSION_NOT_YET_SUPPORTED_RECORD_FIELD); } else if ((ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) { dlog.error(letExpression.pos, DiagnosticCode.LET_EXPRESSION_NOT_YET_SUPPORTED_OBJECT_FIELD); } boolean returnStateBefore = this.statementReturns; 
this.statementReturns = false; for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { analyzeNode((BLangNode) letVariable.definitionNode, letExpression.env); } this.statementReturns = returnStateBefore; analyzeExpr(letExpression.expr, letExpression.env); } public void visit(BLangSimpleVariable varNode) { analyzeTypeNode(varNode.typeNode, this.env); analyzeExpr(varNode.expr); if (Objects.isNull(varNode.symbol)) { return; } if (!Symbols.isPublic(varNode.symbol)) { return; } int ownerSymTag = this.env.scope.owner.tag; if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD || (ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) { analyzeExportableTypeRef(this.env.scope.owner, varNode.type.tsymbol, false, varNode.pos); } else if ((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) { analyzeExportableTypeRef(varNode.symbol, varNode.type.tsymbol, false, varNode.pos); } varNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env)); } private void checkWorkerPeerWorkerUsageInsideWorker(DiagnosticPos pos, BSymbol symbol, SymbolEnv env) { if ((symbol.flags & Flags.WORKER) == Flags.WORKER) { if (isCurrentPositionInWorker(env) && env.scope.lookup(symbol.name).symbol == null) { if (referingForkedWorkerOutOfFork(symbol, env)) { return; } dlog.error(pos, DiagnosticCode.INVALID_WORKER_REFERRENCE, symbol.name); } } } private boolean isCurrentPositionInWorker(SymbolEnv env) { if (env.enclInvokable != null && env.enclInvokable.flagSet.contains(Flag.WORKER)) { return true; } if (env.enclEnv != null && !(env.enclEnv.node.getKind() == NodeKind.PACKAGE || env.enclEnv.node.getKind() == NodeKind.OBJECT_TYPE)) { return isCurrentPositionInWorker(env.enclEnv); } return false; } private boolean referingForkedWorkerOutOfFork(BSymbol symbol, SymbolEnv env) { return (symbol.flags & Flags.FORKED) == Flags.FORKED && env.enclInvokable.getKind() == NodeKind.FUNCTION && ((BLangFunction) env.enclInvokable).anonForkName == null; } @Override public void 
visit(BLangTupleVariable bLangTupleVariable) {
    // Analyze the declared type (if any) and the initializer expression.
    if (bLangTupleVariable.typeNode != null) {
        analyzeNode(bLangTupleVariable.typeNode, this.env);
    }
    analyzeExpr(bLangTupleVariable.expr);
}

@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
    // Analyze the declared type (if any) and the initializer expression.
    if (bLangRecordVariable.typeNode != null) {
        analyzeNode(bLangRecordVariable.typeNode, this.env);
    }
    analyzeExpr(bLangRecordVariable.expr);
}

@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
    // Analyze the declared type (if any) and the initializer expression.
    if (bLangErrorVariable.typeNode != null) {
        analyzeNode(bLangErrorVariable.typeNode, this.env);
    }
    analyzeExpr(bLangErrorVariable.expr);
}

/**
 * Returns a nilable version of the given type: the type itself if it already allows nil,
 * otherwise a new union of the type's members (flattened if it is already a union) plus nil.
 */
private BType getNilableType(BType type) {
    if (type.isNullable()) {
        return type;
    }
    BUnionType unionType = BUnionType.create(null);
    if (type.tag == TypeTags.UNION) {
        // Flatten an existing union instead of nesting it inside the new one.
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>(((BUnionType) type).getMemberTypes());
        unionType.addAll(memTypes);
    }
    unionType.add(type);
    unionType.add(symTable.nilType);
    return unionType;
}

public void visit(BLangIdentifier identifierNode) {
    /* ignore */
}

public void visit(BLangAnnotation annotationNode) {
    annotationNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env));
}

public void visit(BLangAnnotationAttachment annAttachmentNode) {
    // Warn when the attached annotation itself is marked @deprecated.
    BAnnotationSymbol annotationSymbol = annAttachmentNode.annotationSymbol;
    if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
        dlog.warning(annAttachmentNode.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, annotationSymbol);
    }
}

public void visit(BLangSimpleVariableDef varDefNode) {
    this.checkStatementExecutionValidity(varDefNode);
    analyzeNode(varDefNode.var, env);
}

public void visit(BLangCompoundAssignment compoundAssignment) {
    this.checkStatementExecutionValidity(compoundAssignment);
    analyzeExpr(compoundAssignment.varRef);
    analyzeExpr(compoundAssignment.expr);
}

public void visit(BLangAssignment assignNode) {
    this.checkStatementExecutionValidity(assignNode);
    analyzeExpr(assignNode.varRef);
analyzeExpr(assignNode.expr); } public void visit(BLangRecordDestructure stmt) { this.checkDuplicateVarRefs(getVarRefs(stmt.varRef)); this.checkStatementExecutionValidity(stmt); analyzeExpr(stmt.varRef); analyzeExpr(stmt.expr); } public void visit(BLangErrorDestructure stmt) { this.checkDuplicateVarRefs(getVarRefs(stmt.varRef)); this.checkStatementExecutionValidity(stmt); analyzeExpr(stmt.varRef); analyzeExpr(stmt.expr); } @Override public void visit(BLangTupleDestructure stmt) { this.checkDuplicateVarRefs(getVarRefs(stmt.varRef)); this.checkStatementExecutionValidity(stmt); analyzeExpr(stmt.varRef); analyzeExpr(stmt.expr); } private void checkDuplicateVarRefs(List<BLangExpression> varRefs) { checkDuplicateVarRefs(varRefs, new HashSet<>()); } private void checkDuplicateVarRefs(List<BLangExpression> varRefs, Set<BSymbol> symbols) { for (BLangExpression varRef : varRefs) { if (varRef == null || (varRef.getKind() != NodeKind.SIMPLE_VARIABLE_REF && varRef.getKind() != NodeKind.RECORD_VARIABLE_REF && varRef.getKind() != NodeKind.ERROR_VARIABLE_REF && varRef.getKind() != NodeKind.TUPLE_VARIABLE_REF)) { continue; } if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF && names.fromIdNode(((BLangSimpleVarRef) varRef).variableName) == Names.IGNORE) { continue; } if (varRef.getKind() == NodeKind.TUPLE_VARIABLE_REF) { checkDuplicateVarRefs(getVarRefs((BLangTupleVarRef) varRef), symbols); } if (varRef.getKind() == NodeKind.RECORD_VARIABLE_REF) { checkDuplicateVarRefs(getVarRefs((BLangRecordVarRef) varRef), symbols); } if (varRef.getKind() == NodeKind.ERROR_VARIABLE_REF) { checkDuplicateVarRefs(getVarRefs((BLangErrorVarRef) varRef), symbols); } BLangVariableReference varRefExpr = (BLangVariableReference) varRef; if (varRefExpr.symbol != null && !symbols.add(varRefExpr.symbol)) { this.dlog.error(varRef.pos, DiagnosticCode.DUPLICATE_VARIABLE_IN_BINDING_PATTERN, varRefExpr.symbol); } } } private List<BLangExpression> getVarRefs(BLangRecordVarRef varRef) { List<BLangExpression> 
varRefs = varRef.recordRefFields.stream()
        .map(e -> e.variableReference).collect(Collectors.toList());
// restParam may be null; callers (checkDuplicateVarRefs) skip null entries.
varRefs.add((BLangExpression) varRef.restParam);
return varRefs;
}

// Collects the reason, detail, and rest var-refs of an error binding pattern.
// Any of these may be null; callers are expected to tolerate null entries.
private List<BLangExpression> getVarRefs(BLangErrorVarRef varRef) {
    List<BLangExpression> varRefs = new ArrayList<>();
    varRefs.add(varRef.reason);
    varRefs.addAll(varRef.detail.stream().map(e -> e.expr).collect(Collectors.toList()));
    varRefs.add(varRef.restVar);
    return varRefs;
}

// Collects the member and rest var-refs of a tuple binding pattern (restParam may be null).
private List<BLangExpression> getVarRefs(BLangTupleVarRef varRef) {
    List<BLangExpression> varRefs = new ArrayList<>(varRef.expressions);
    varRefs.add((BLangExpression) varRef.restParam);
    return varRefs;
}

public void visit(BLangBreak breakNode) {
    this.checkStatementExecutionValidity(breakNode);
    // break is only legal inside a loop, and must not be used to jump out of a transaction.
    if (this.loopCount == 0) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
        return;
    }
    if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.lastStatement = true;
}

public void visit(BLangThrow throwNode) {
    /* ignore */
}

public void visit(BLangPanic panicNode) {
    this.checkStatementExecutionValidity(panicNode);
    // panic terminates the enclosing block, so it counts as a returning statement.
    this.statementReturns = true;
    analyzeExpr(panicNode.expr);
}

public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    this.checkStatementExecutionValidity(xmlnsStmtNode);
}

public void visit(BLangExpressionStmt exprStmtNode) {
    this.checkStatementExecutionValidity(exprStmtNode);
    analyzeExpr(exprStmtNode.expr);
    validateExprStatementExpression(exprStmtNode);
}

// Validates that an expression statement's expression is one whose value may be discarded:
// unwraps match/check/checkpanic wrappers before checking the underlying expression kind.
private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) {
    BLangExpression expr = exprStmtNode.expr;
    if (expr.getKind() == NodeKind.WORKER_SYNC_SEND) {
        return;
    }
    while (expr.getKind() == NodeKind.MATCH_EXPRESSION
            || expr.getKind() == NodeKind.CHECK_EXPR
            || expr.getKind() == NodeKind.CHECK_PANIC_EXPR) {
        if (expr.getKind() == NodeKind.MATCH_EXPRESSION) {
            expr = ((BLangMatchExpression) expr).expr;
        } else if (expr.getKind() ==
NodeKind.CHECK_EXPR) { expr = ((BLangCheckedExpr) expr).expr; } else if (expr.getKind() == NodeKind.CHECK_PANIC_EXPR) { expr = ((BLangCheckPanickedExpr) expr).expr; } } if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.WAIT_EXPR) { return; } if (expr.type == symTable.nilType) { dlog.error(exprStmtNode.pos, DiagnosticCode.INVALID_EXPR_STATEMENT); } } public void visit(BLangTryCatchFinally tryNode) { /* ignore */ } public void visit(BLangCatch catchNode) { /* ignore */ } private boolean isTopLevel() { SymbolEnv env = this.env; return env.enclInvokable.body == env.node; } private boolean isInWorker() { return env.enclInvokable.flagSet.contains(Flag.WORKER); } private boolean isCommunicationAllowedLocation(String workerIdentifier) { return (isDefaultWorkerCommunication(workerIdentifier) && isInWorker()) || isTopLevel(); } private boolean isDefaultWorkerCommunication(String workerIdentifier) { return workerIdentifier.equals(DEFAULT_WORKER_NAME); } private boolean workerExists(BType type, String workerName) { if (isDefaultWorkerCommunication(workerName) && isInWorker()) { return true; } if (type == symTable.semanticError) { return false; } return type.tag == TypeTags.FUTURE && ((BFutureType) type).workerDerivative; } public void visit(BLangWorkerSend workerSendNode) { BSymbol receiver = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerSendNode.workerIdentifier)); if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) { receiver = symTable.notFoundSymbol; } verifyPeerCommunication(workerSendNode.pos, receiver, workerSendNode.workerIdentifier.value); this.checkStatementExecutionValidity(workerSendNode); if (workerSendNode.isChannel) { analyzeExpr(workerSendNode.expr); if (workerSendNode.keyExpr != null) { analyzeExpr(workerSendNode.keyExpr); } return; } WorkerActionSystem was = this.workerActionSystemStack.peek(); BType type = workerSendNode.expr.type; if (type == symTable.semanticError) { was.hasErrors = true; } else if 
(!type.isAnydata()) { this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, type); } String workerName = workerSendNode.workerIdentifier.getValue(); boolean allowedLocation = isCommunicationAllowedLocation(workerName); if (!allowedLocation) { this.dlog.error(workerSendNode.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION); was.hasErrors = true; } if (!this.workerExists(workerSendNode.type, workerName)) { this.dlog.error(workerSendNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName); was.hasErrors = true; } workerSendNode.type = createAccumulatedErrorTypeForMatchingRecive(workerSendNode.pos, workerSendNode.expr.type); was.addWorkerAction(workerSendNode); analyzeExpr(workerSendNode.expr); validateActionParentNode(workerSendNode.pos, workerSendNode.expr); } private BType createAccumulatedErrorTypeForMatchingRecive(DiagnosticPos pos, BType exprType) { Set<BType> returnTypesUpToNow = this.returnTypes.peek(); LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<BType>() { { Comparator.comparing(BType::toString); } }; for (BType returnType : returnTypesUpToNow) { if (returnType.tag == TypeTags.ERROR) { returnTypeAndSendType.add(returnType); } else { this.dlog.error(pos, DiagnosticCode.WORKER_SEND_AFTER_RETURN); } } returnTypeAndSendType.add(exprType); if (returnTypeAndSendType.size() > 1) { return BUnionType.create(null, returnTypeAndSendType); } else { return exprType; } } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { BSymbol receiver = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier)); if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) { receiver = symTable.notFoundSymbol; } verifyPeerCommunication(syncSendExpr.pos, receiver, syncSendExpr.workerIdentifier.value); validateActionParentNode(syncSendExpr.pos, syncSendExpr); String workerName = syncSendExpr.workerIdentifier.getValue(); WorkerActionSystem was = this.workerActionSystemStack.peek(); boolean allowedLocation = 
isCommunicationAllowedLocation(workerName); if (!allowedLocation) { this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_WORKER_SEND_POSITION); was.hasErrors = true; } if (!this.workerExists(syncSendExpr.workerType, workerName)) { this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName); was.hasErrors = true; } syncSendExpr.type = createAccumulatedErrorTypeForMatchingRecive(syncSendExpr.pos, syncSendExpr.expr.type); was.addWorkerAction(syncSendExpr); analyzeExpr(syncSendExpr.expr); } @Override public void visit(BLangWorkerReceive workerReceiveNode) { validateActionParentNode(workerReceiveNode.pos, workerReceiveNode); BSymbol sender = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveNode.workerIdentifier)); if ((sender.tag & SymTag.VARIABLE) != SymTag.VARIABLE) { sender = symTable.notFoundSymbol; } verifyPeerCommunication(workerReceiveNode.pos, sender, workerReceiveNode.workerIdentifier.value); if (workerReceiveNode.isChannel) { if (workerReceiveNode.keyExpr != null) { analyzeExpr(workerReceiveNode.keyExpr); } return; } WorkerActionSystem was = this.workerActionSystemStack.peek(); String workerName = workerReceiveNode.workerIdentifier.getValue(); boolean allowedLocation = isCommunicationAllowedLocation(workerName); if (!allowedLocation) { this.dlog.error(workerReceiveNode.pos, DiagnosticCode.INVALID_WORKER_RECEIVE_POSITION); was.hasErrors = true; } if (!this.workerExists(workerReceiveNode.workerType, workerName)) { this.dlog.error(workerReceiveNode.pos, DiagnosticCode.UNDEFINED_WORKER, workerName); was.hasErrors = true; } workerReceiveNode.matchingSendsError = createAccumulatedErrorTypeForMatchingSyncSend(workerReceiveNode); was.addWorkerAction(workerReceiveNode); } private void verifyPeerCommunication(DiagnosticPos pos, BSymbol otherWorker, String otherWorkerName) { if (env.enclEnv.node.getKind() != NodeKind.FUNCTION) { return; } BLangFunction funcNode = (BLangFunction) env.enclEnv.node; Set<Flag> flagSet = 
funcNode.flagSet; Name workerDerivedName = names.fromString("0" + otherWorker.name.value); if (flagSet.contains(Flag.WORKER)) { if (otherWorkerName.equals(DEFAULT_WORKER_NAME)) { if (flagSet.contains(Flag.FORKED)) { dlog.error(pos, DiagnosticCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS); } return; } Scope enclFunctionScope = env.enclEnv.enclEnv.scope; BInvokableSymbol wLambda = (BInvokableSymbol) enclFunctionScope.lookup(workerDerivedName).symbol; if (wLambda != null && funcNode.anonForkName != null && !funcNode.anonForkName.equals(wLambda.enclForkName)) { dlog.error(pos, DiagnosticCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS); } } else { BInvokableSymbol wLambda = (BInvokableSymbol) env.scope.lookup(workerDerivedName).symbol; if (wLambda != null && wLambda.enclForkName != null) { dlog.error(pos, DiagnosticCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS); } } } public BType createAccumulatedErrorTypeForMatchingSyncSend(BLangWorkerReceive workerReceiveNode) { Set<BType> returnTypesUpToNow = this.returnTypes.peek(); LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>(); for (BType returnType : returnTypesUpToNow) { if (returnType.tag == TypeTags.ERROR) { returnTypeAndSendType.add(returnType); } else { this.dlog.error(workerReceiveNode.pos, DiagnosticCode.WORKER_RECEIVE_AFTER_RETURN); } } returnTypeAndSendType.add(symTable.nilType); if (returnTypeAndSendType.size() > 1) { return BUnionType.create(null, returnTypeAndSendType); } else { return symTable.nilType; } } public void visit(BLangLiteral literalExpr) { if (literalExpr.type.tag == TypeTags.NIL && NULL_LITERAL.equals(literalExpr.originalValue) && !literalExpr.isJSONContext && !this.isJSONContext) { dlog.error(literalExpr.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL); } } public void visit(BLangListConstructorExpr listConstructorExpr) { analyzeExprs(listConstructorExpr.exprs); } public void visit(BLangRecordLiteral recordLiteral) { List<RecordLiteralNode.RecordField> fields = 
recordLiteral.fields; for (RecordLiteralNode.RecordField field : fields) { if (field.isKeyValueField()) { analyzeExpr(((BLangRecordKeyValueField) field).valueExpr); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { analyzeExpr((BLangRecordLiteral.BLangRecordVarNameField) field); } else { analyzeExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr); } } Set<Object> names = new HashSet<>(); BType type = recordLiteral.type; boolean isOpenRecord = type != null && type.tag == TypeTags.RECORD && !((BRecordType) type).sealed; for (RecordLiteralNode.RecordField field : fields) { BLangExpression keyExpr; if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField = (BLangRecordLiteral.BLangRecordSpreadOperatorField) field; BLangExpression spreadOpExpr = spreadOpField.expr; analyzeExpr(spreadOpExpr); if (spreadOpExpr.type.tag != TypeTags.RECORD) { continue; } for (BField bField : ((BRecordType) spreadOpExpr.type).fields) { if (Symbols.isOptional(bField.symbol)) { continue; } String name = bField.name.value; if (names.contains(name)) { this.dlog.error(spreadOpExpr.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL_SPREAD_OP, recordLiteral.expectedType.getKind().typeName(), name, spreadOpField); } names.add(name); } } else { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKey key = ((BLangRecordKeyValueField) field).key; keyExpr = key.expr; if (key.computedKey) { analyzeExpr(keyExpr); continue; } } else { keyExpr = (BLangRecordLiteral.BLangRecordVarNameField) field; } if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { String name = ((BLangSimpleVarRef) keyExpr).variableName.value; if (names.contains(name)) { this.dlog.error(keyExpr.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, recordLiteral.expectedType.getKind().typeName(), name); } if (isOpenRecord && ((BRecordType) type).fields.stream() .noneMatch(recField -> name.equals(recField.name.value))) { 
dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_IDENTIFIER_KEY, name); } names.add(name); } else if (keyExpr.getKind() == NodeKind.LITERAL || keyExpr.getKind() == NodeKind.NUMERIC_LITERAL) { Object name = ((BLangLiteral) keyExpr).value; if (names.contains(name)) { this.dlog.error(keyExpr.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, recordLiteral.parent.type.getKind().typeName(), name); } names.add(name); } } } } public void visit(BLangTableLiteral tableLiteral) { /* ignore */ } public void visit(BLangSimpleVarRef varRefExpr) { switch (varRefExpr.parent.getKind()) { case WORKER_RECEIVE: case WORKER_SEND: case WORKER_SYNC_SEND: return; default: if (varRefExpr.type != null && varRefExpr.type.tag == TypeTags.FUTURE) { checkWorkerPeerWorkerUsageInsideWorker(varRefExpr.pos, varRefExpr.symbol, this.env); } } if (varRefExpr.symbol != null && Symbols.isFlagOn(varRefExpr.symbol.flags, Flags.DEPRECATED)) { dlog.warning(varRefExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, varRefExpr); } } public void visit(BLangRecordVarRef varRefExpr) { /* ignore */ } public void visit(BLangErrorVarRef varRefExpr) { /* ignore */ } public void visit(BLangTupleVarRef varRefExpr) { /* ignore */ } public void visit(BLangFieldBasedAccess fieldAccessExpr) { analyzeExpr(fieldAccessExpr.expr); BSymbol symbol = fieldAccessExpr.symbol; if (symbol != null && Symbols.isFlagOn(fieldAccessExpr.symbol.flags, Flags.DEPRECATED)) { dlog.warning(fieldAccessExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, fieldAccessExpr); } } public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeExpr(indexAccessExpr.indexExpr); analyzeExpr(indexAccessExpr.expr); } public void visit(BLangInvocation invocationExpr) { analyzeExpr(invocationExpr.expr); analyzeExprs(invocationExpr.requiredArgs); analyzeExprs(invocationExpr.restArgs); if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) { BSymbol funcSymbol = invocationExpr.symbol; if 
(Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) { dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, invocationExpr); } } if (invocationExpr.actionInvocation || invocationExpr.async) { if (invocationExpr.actionInvocation || !this.withinLockBlock) { validateActionInvocation(invocationExpr.pos, invocationExpr); return; } dlog.error(invocationExpr.pos, invocationExpr.functionPointerInvocation ? DiagnosticCode.USAGE_OF_WORKER_WITHIN_LOCK_IS_PROHIBITED : DiagnosticCode.USAGE_OF_START_WITHIN_LOCK_IS_PROHIBITED); } } private void validateActionInvocation(DiagnosticPos pos, BLangInvocation iExpr) { if (iExpr.expr != null) { final NodeKind clientNodeKind = iExpr.expr.getKind(); if (clientNodeKind != NodeKind.SIMPLE_VARIABLE_REF && clientNodeKind != NodeKind.FIELD_BASED_ACCESS_EXPR) { dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } else if (clientNodeKind == NodeKind.FIELD_BASED_ACCESS_EXPR) { final BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) iExpr.expr; if (fieldBasedAccess.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } else { final BLangSimpleVarRef selfName = (BLangSimpleVarRef) fieldBasedAccess.expr; if (!Names.SELF.equals(selfName.symbol.name)) { dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } } } } validateActionParentNode(pos, iExpr); } /** * Actions can only occur as part of a statement or nested inside other actions. 
*/ private void validateActionParentNode(DiagnosticPos pos, BLangNode node) { BLangNode parent = node.parent; if (parent.getKind() == NodeKind.BLOCK) { return; } while (parent != null) { final NodeKind kind = parent.getKind(); if (kind == NodeKind.ASSIGNMENT || kind == NodeKind.EXPRESSION_STATEMENT || kind == NodeKind.RETURN || kind == NodeKind.RECORD_DESTRUCTURE || kind == NodeKind.ERROR_DESTRUCTURE || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE || kind == NodeKind.RECORD_VARIABLE || kind == NodeKind.TUPLE_VARIABLE || kind == NodeKind.ERROR_VARIABLE || kind == NodeKind.MATCH || kind == NodeKind.FOREACH) { return; } else if (kind == NodeKind.CHECK_PANIC_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.WORKER_RECEIVE || kind == NodeKind.WORKER_FLUSH || kind == NodeKind.WORKER_SEND || kind == NodeKind.WAIT_EXPR || kind == NodeKind.GROUP_EXPR || kind == NodeKind.TRAP_EXPR) { parent = parent.parent; if (parent.getKind() == NodeKind.BLOCK || parent.getKind() == NodeKind.BLOCK_FUNCTION_BODY) { return; } continue; } else if (kind == NodeKind.ELVIS_EXPR && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) { parent = parent.parent; continue; } break; } dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR); } public void visit(BLangTypeInit cIExpr) { analyzeExprs(cIExpr.argsExpr); analyzeExpr(cIExpr.initInvocation); BType type = cIExpr.type; if (cIExpr.userDefinedType != null && Symbols.isFlagOn(type.tsymbol.flags, Flags.DEPRECATED)) { dlog.warning(cIExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, type); } } public void visit(BLangTernaryExpr ternaryExpr) { analyzeExpr(ternaryExpr.expr); boolean isJSONCtx = getIsJSONContext(ternaryExpr.type); this.isJSONContext = isJSONCtx; analyzeExpr(ternaryExpr.thenExpr); this.isJSONContext = isJSONCtx; analyzeExpr(ternaryExpr.elseExpr); } public void visit(BLangWaitExpr awaitExpr) { 
analyzeExpr(awaitExpr.getExpression()); validateActionParentNode(awaitExpr.pos, awaitExpr); } public void visit(BLangWaitForAllExpr waitForAllExpr) { waitForAllExpr.keyValuePairs.forEach(keyValue -> { BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr; analyzeExpr(expr); }); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { analyzeExpr(xmlElementAccess.expr); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { analyzeExpr(xmlNavigation.expr); if (xmlNavigation.childIndex != null) { if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS || xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { dlog.error(xmlNavigation.pos, DiagnosticCode.UNSUPPORTED_INDEX_IN_XML_NAVIGATION); } analyzeExpr(xmlNavigation.childIndex); } validateMethodInvocationsInXMLNavigationExpression(xmlNavigation); } private void validateMethodInvocationsInXMLNavigationExpression(BLangXMLNavigationAccess expression) { if (!expression.methodInvocationAnalyzed && expression.parent.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expression.parent; if (invocation.argExprs.contains(expression) && ((invocation.symbol.flags & Flags.LANG_LIB) != Flags.LANG_LIB)) { return; } dlog.error(invocation.pos, DiagnosticCode.UNSUPPORTED_METHOD_INVOCATION_XML_NAV); } expression.methodInvocationAnalyzed = true; } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { BLangIdentifier flushWrkIdentifier = workerFlushExpr.workerIdentifier; Stack<WorkerActionSystem> workerActionSystems = this.workerActionSystemStack; WorkerActionSystem currentWrkerAction = workerActionSystems.peek(); List<BLangWorkerSend> sendStmts = getAsyncSendStmtsOfWorker(currentWrkerAction); if (flushWrkIdentifier != null) { List<BLangWorkerSend> sendsToGivenWrkr = sendStmts.stream() .filter(bLangNode -> bLangNode.workerIdentifier.equals (flushWrkIdentifier)) 
.collect(Collectors.toList()); if (sendsToGivenWrkr.size() == 0) { this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH_FOR_WORKER, flushWrkIdentifier, currentWrkerAction.currentWorkerId()); return; } else { sendStmts = sendsToGivenWrkr; } } else { if (sendStmts.size() == 0) { this.dlog.error(workerFlushExpr.pos, DiagnosticCode.INVALID_WORKER_FLUSH, currentWrkerAction.currentWorkerId()); return; } } workerFlushExpr.cachedWorkerSendStmts = sendStmts; validateActionParentNode(workerFlushExpr.pos, workerFlushExpr); } private List<BLangWorkerSend> getAsyncSendStmtsOfWorker(WorkerActionSystem currentWorkerAction) { List<BLangNode> actions = currentWorkerAction.workerActionStateMachines.peek().actions; return actions.stream() .filter(CodeAnalyzer::isWorkerSend) .map(bLangNode -> (BLangWorkerSend) bLangNode) .collect(Collectors.toList()); } @Override public void visit(BLangTrapExpr trapExpr) { analyzeExpr(trapExpr.expr); } public void visit(BLangBinaryExpr binaryExpr) { if (validateBinaryExpr(binaryExpr)) { boolean isJSONCtx = getIsJSONContext(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type); this.isJSONContext = isJSONCtx; analyzeExpr(binaryExpr.lhsExpr); this.isJSONContext = isJSONCtx; analyzeExpr(binaryExpr.rhsExpr); } } private boolean validateBinaryExpr(BLangBinaryExpr binaryExpr) { if (binaryExpr.lhsExpr.type.tag != TypeTags.FUTURE && binaryExpr.rhsExpr.type.tag != TypeTags.FUTURE) { return true; } BLangNode parentNode = binaryExpr.parent; if (binaryExpr.lhsExpr.type.tag == TypeTags.FUTURE || binaryExpr.rhsExpr.type.tag == TypeTags.FUTURE) { if (parentNode == null) { return false; } if (parentNode.getKind() == NodeKind.WAIT_EXPR) { return true; } } if (parentNode.getKind() != NodeKind.BINARY_EXPR && binaryExpr.opKind == OperatorKind.BITWISE_OR) { dlog.error(binaryExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED, OperatorKind.BITWISE_OR, symTable.futureType); return false; } if (parentNode.getKind() == NodeKind.BINARY_EXPR) { return 
validateBinaryExpr((BLangBinaryExpr) parentNode); } return true; } public void visit(BLangElvisExpr elvisExpr) { analyzeExpr(elvisExpr.lhsExpr); analyzeExpr(elvisExpr.rhsExpr); } @Override public void visit(BLangGroupExpr groupExpr) { analyzeExpr(groupExpr.expression); } public void visit(BLangUnaryExpr unaryExpr) { analyzeExpr(unaryExpr.expr); } public void visit(BLangTypedescExpr accessExpr) { /* ignore */ } public void visit(BLangXMLQName xmlQName) { /* ignore */ } public void visit(BLangXMLAttribute xmlAttribute) { analyzeExpr(xmlAttribute.name); analyzeExpr(xmlAttribute.value); } public void visit(BLangXMLElementLiteral xmlElementLiteral) { analyzeExpr(xmlElementLiteral.startTagName); analyzeExpr(xmlElementLiteral.endTagName); analyzeExprs(xmlElementLiteral.attributes); analyzeExprs(xmlElementLiteral.children); } public void visit(BLangXMLTextLiteral xmlTextLiteral) { analyzeExprs(xmlTextLiteral.textFragments); } public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { analyzeExprs(xmlCommentLiteral.textFragments); } public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { analyzeExprs(xmlProcInsLiteral.dataFragments); analyzeExpr(xmlProcInsLiteral.target); } public void visit(BLangXMLQuotedString xmlQuotedString) { analyzeExprs(xmlQuotedString.textFragments); } public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { analyzeExprs(stringTemplateLiteral.exprs); } public void visit(BLangLambdaFunction bLangLambdaFunction) { boolean isWorker = false; if (bLangLambdaFunction.parent.getKind() == NodeKind.VARIABLE) { String workerVarName = ((BLangSimpleVariable) bLangLambdaFunction.parent).name.value; if (workerVarName.startsWith(WORKER_LAMBDA_VAR_PREFIX)) { String workerName = workerVarName.substring(1); isWorker = true; this.workerActionSystemStack.peek().startWorkerActionStateMachine(workerName, bLangLambdaFunction.function.pos, bLangLambdaFunction.function); } } boolean statementReturn = this.statementReturns; 
this.visitFunction(bLangLambdaFunction.function); this.statementReturns = statementReturn; if (isWorker) { this.workerActionSystemStack.peek().endWorkerActionStateMachine(); } } public void visit(BLangArrowFunction bLangArrowFunction) { analyzeExpr(bLangArrowFunction.body.expr); } public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) { analyzeExpr(xmlAttributeAccessExpr.expr); analyzeExpr(xmlAttributeAccessExpr.indexExpr); } public void visit(BLangIntRangeExpression intRangeExpression) { analyzeExpr(intRangeExpression.startExpr); analyzeExpr(intRangeExpression.endExpr); } /* Type Nodes */ @Override public void visit(BLangRecordTypeNode recordTypeNode) { SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env); if (recordTypeNode.isFieldAnalyseRequired) { recordTypeNode.fields.forEach(field -> analyzeNode(field, recordEnv)); } } @Override public void visit(BLangObjectTypeNode objectTypeNode) { SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env); if (objectTypeNode.isFieldAnalyseRequired) { objectTypeNode.fields.forEach(field -> analyzeNode(field, objectEnv)); } Stream.concat(objectTypeNode.functions.stream(), Optional.ofNullable(objectTypeNode.initFunction).map(Stream::of).orElseGet(Stream::empty)) .sorted(Comparator.comparingInt(fn -> fn.pos.sLine)) .forEachOrdered(fn -> this.analyzeNode(fn, objectEnv)); } @Override public void visit(BLangValueType valueType) { /* ignore */ } @Override public void visit(BLangArrayType arrayType) { analyzeTypeNode(arrayType.elemtype, env); } public void visit(BLangBuiltInRefTypeNode builtInRefType) { /* ignore */ } public void visit(BLangConstrainedType constrainedType) { analyzeTypeNode(constrainedType.constraint, env); } public void visit(BLangStreamType streamType) { analyzeTypeNode(streamType.constraint, env); analyzeTypeNode(streamType.error, env); } public void visit(BLangErrorType errorType) { analyzeTypeNode(errorType.reasonType, 
env); analyzeTypeNode(errorType.detailType, env); } public void visit(BLangUserDefinedType userDefinedType) { BTypeSymbol typeSymbol = userDefinedType.type.tsymbol; if (typeSymbol != null && Symbols.isFlagOn(typeSymbol.flags, Flags.DEPRECATED)) { dlog.warning(userDefinedType.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, userDefinedType); } } public void visit(BLangTupleTypeNode tupleTypeNode) { tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env)); analyzeTypeNode(tupleTypeNode.restParamType, env); } public void visit(BLangUnionTypeNode unionTypeNode) { unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, env)); } public void visit(BLangFunctionTypeNode functionTypeNode) { functionTypeNode.params.forEach(node -> analyzeNode(node, env)); analyzeTypeNode(functionTypeNode.returnTypeNode, env); } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { /* Ignore */ } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { analyzeExpr(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { analyzeExpr(bLangNamedArgsExpression.expr); } @Override public void visit(BLangMatchExpression bLangMatchExpression) { } @Override public void visit(BLangCheckedExpr checkedExpr) { analyzeExpr(checkedExpr.expr); if (this.env.scope.owner.getKind() == SymbolKind.PACKAGE) { return; } BType exprType = env.enclInvokable.getReturnTypeNode().type; if (!types.isAssignable(getErrorTypes(checkedExpr.expr.type), exprType)) { dlog.error(checkedExpr.pos, DiagnosticCode.CHECKED_EXPR_NO_MATCHING_ERROR_RETURN_IN_ENCL_INVOKABLE); } if (checkReturnValidityInTransaction()) { this.dlog.error(checkedExpr.pos, DiagnosticCode.CHECK_EXPRESSION_INVALID_USAGE_WITHIN_TRANSACTION_BLOCK); return; } returnTypes.peek().add(exprType); } @Override public void visit(BLangCheckPanickedExpr checkPanicExpr) { analyzeExpr(checkPanicExpr.expr); } @Override public void 
visit(BLangServiceConstructorExpr serviceConstructorExpr) { } @Override public void visit(BLangQueryExpr queryExpr) { int fromCount = 0; for (FromClauseNode fromClauseNode : queryExpr.fromClauseList) { fromCount++; BLangExpression collection = (BLangExpression) fromClauseNode.getCollection(); if (fromCount > 1) { if (TypeTags.STREAM == collection.type.tag) { this.dlog.error(collection.pos, DiagnosticCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM); } } analyzeNode((BLangFromClause) fromClauseNode, env); } for (WhereClauseNode whereClauseNode : queryExpr.whereClauseList) { analyzeNode((BLangWhereClause) whereClauseNode, env); } analyzeNode(queryExpr.selectClause, env); } @Override public void visit(BLangQueryAction queryAction) { int fromCount = 0; for (FromClauseNode fromClauseNode : queryAction.fromClauseList) { fromCount++; BLangExpression collection = (BLangExpression) fromClauseNode.getCollection(); if (fromCount > 1) { if (TypeTags.STREAM == collection.type.tag) { this.dlog.error(collection.pos, DiagnosticCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM); } } analyzeNode((BLangFromClause) fromClauseNode, env); } for (WhereClauseNode whereClauseNode : queryAction.whereClauseList) { analyzeNode((BLangWhereClause) whereClauseNode, env); } analyzeNode(queryAction.doClause, env); validateActionParentNode(queryAction.pos, queryAction); } @Override public void visit(BLangFromClause fromClause) { analyzeExpr(fromClause.collection); } @Override public void visit(BLangWhereClause whereClause) { analyzeExpr(whereClause.expression); } @Override public void visit(BLangSelectClause selectClause) { analyzeExpr(selectClause.expression); } @Override public void visit(BLangDoClause doClause) { analyzeNode(doClause.body, env); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { analyzeNode(typeTestExpr.expr, env); if (typeTestExpr.typeNode.type == symTable.semanticError || typeTestExpr.expr.type == symTable.semanticError) { return; } if (types.isAssignable(typeTestExpr.expr.type, 
typeTestExpr.typeNode.type)) { dlog.error(typeTestExpr.pos, DiagnosticCode.UNNECESSARY_CONDITION); return; } if (!types.isAssignable(typeTestExpr.typeNode.type, typeTestExpr.expr.type) && !indirectIntersectionExists(typeTestExpr.expr, typeTestExpr.typeNode.type)) { dlog.error(typeTestExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPE_CHECK, typeTestExpr.expr.type, typeTestExpr.typeNode.type); } } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { analyzeExpr(annotAccessExpr.expr); BAnnotationSymbol annotationSymbol = annotAccessExpr.annotationSymbol; if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) { dlog.warning(annotAccessExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_CONSTRUCT, annotationSymbol); } } private boolean indirectIntersectionExists(BLangExpression expression, BType testType) { BType expressionType = expression.type; switch (expressionType.tag) { case TypeTags.UNION: if (types.getTypeForUnionTypeMembersAssignableToType((BUnionType) expressionType, testType) != symTable.semanticError) { return true; } break; case TypeTags.FINITE: if (types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) expressionType, testType) != symTable.semanticError) { return true; } } switch (testType.tag) { case TypeTags.UNION: return types.getTypeForUnionTypeMembersAssignableToType((BUnionType) testType, expressionType) != symTable.semanticError; case TypeTags.FINITE: return types.getTypeForFiniteTypeValuesAssignableToType((BFiniteType) testType, expressionType) != symTable.semanticError; } return false; } private <E extends BLangExpression> void analyzeExpr(E node) { if (node == null) { return; } BLangNode myParent = parent; node.parent = parent; parent = node; node.accept(this); this.isJSONContext = false; parent = myParent; checkAccess(node); } private <E extends BLangExpression> void analyzeExpr(E node, SymbolEnv env) { if (node == null) { return; } SymbolEnv prevEnv = this.env; this.env = env; BLangNode myParent = 
parent; node.parent = parent; parent = node; node.accept(this); this.isJSONContext = false; parent = myParent; checkAccess(node); this.env = prevEnv; } @Override public void visit(BLangConstant constant) { analyzeTypeNode(constant.typeNode, env); analyzeNode(constant.expr, env); analyzeExportableTypeRef(constant.symbol, constant.symbol.type.tsymbol, false, constant.pos); constant.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, env)); } /** * This method checks for private symbols being accessed or used outside of package and|or private symbols being * used in public fields of objects/records and will fail those occurrences. * * @param node expression node to analyze */ private <E extends BLangExpression> void checkAccess(E node) { if (node.type != null) { checkAccessSymbol(node.type.tsymbol, node.pos); } if (node.getKind() == NodeKind.INVOCATION) { BLangInvocation bLangInvocation = (BLangInvocation) node; checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos); } } private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) { if (symbol == null) { return; } if (env.enclPkg.symbol.pkgID != symbol.pkgID && !Symbols.isPublic(symbol)) { dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name); } } private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) { for (int i = 0; i < nodeList.size(); i++) { analyzeExpr(nodeList.get(i)); } } private void initNewWorkerActionSystem() { this.workerActionSystemStack.push(new WorkerActionSystem()); } private void finalizeCurrentWorkerActionSystem() { WorkerActionSystem was = this.workerActionSystemStack.pop(); if (!was.hasErrors) { this.validateWorkerInteractions(was); } } private static boolean isWorkerSend(BLangNode action) { return action.getKind() == NodeKind.WORKER_SEND; } private static boolean isWorkerSyncSend(BLangNode action) { return action.getKind() == NodeKind.WORKER_SYNC_SEND; } private String extractWorkerId(BLangNode action) 
{
    if (isWorkerSend(action)) {
        return ((BLangWorkerSend) action).workerIdentifier.value;
    } else if (isWorkerSyncSend(action)) {
        return ((BLangWorkerSyncSendExpr) action).workerIdentifier.value;
    } else {
        // Remaining worker action kind is a receive.
        return ((BLangWorkerReceive) action).workerIdentifier.value;
    }
}

// Runs the worker action system to a fixed point: repeatedly matches each worker's pending
// send/sync-send against the target worker's pending receive, advancing both state machines,
// until no further progress is possible. Any worker left with unmatched actions is reported
// as an invalid worker interaction.
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) {
    BLangNode currentAction;
    boolean systemRunning;
    do {
        systemRunning = false;
        for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
            if (worker.done()) {
                continue;
            }
            currentAction = worker.currentAction();
            // Only sends can initiate a match; a worker blocked on a receive waits for its peer.
            if (!isWorkerSend(currentAction) && !isWorkerSyncSend(currentAction)) {
                continue;
            }
            WorkerActionStateMachine otherSM = workerActionSystem.find(this.extractWorkerId(currentAction));
            if (otherSM == null || !otherSM.currentIsReceive(worker.workerId)) {
                continue;
            }
            BLangWorkerReceive receive = (BLangWorkerReceive) otherSM.currentAction();
            if (isWorkerSyncSend(currentAction)) {
                this.validateWorkerActionParameters((BLangWorkerSyncSendExpr) currentAction, receive);
            } else {
                this.validateWorkerActionParameters((BLangWorkerSend) currentAction, receive);
            }
            // Both sides advance past the matched pair; record that progress was made so the
            // outer do-while takes another pass.
            otherSM.next();
            worker.next();
            systemRunning = true;
            String channelName = generateChannelName(worker.workerId, otherSM.workerId);
            otherSM.node.sendsToThis.add(channelName);
            worker.node.sendsToThis.add(channelName);
        }
    } while (systemRunning);
    if (!workerActionSystem.everyoneDone()) {
        this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
    }
}

// Emits a single diagnostic at the system's root position describing the stuck state of all
// worker state machines (their toString shows the unmatched actions).
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
    this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION,
            workerActionSystem.toString());
}

// Type-checks a matched async send/receive pair and wires the send expression into the receive.
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
    types.checkType(receive, send.type, receive.type);
    addImplicitCast(send.type, receive);
    NodeKind kind = receive.parent.getKind();
    if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR ||
kind == NodeKind.CHECK_PANIC_EXPR) { typeChecker.checkExpr((BLangExpression) receive.parent, receive.env); } receive.sendExpression = send.expr; } private void validateWorkerActionParameters(BLangWorkerSyncSendExpr send, BLangWorkerReceive receive) { send.receive = receive; NodeKind parentNodeKind = send.parent.getKind(); if (parentNodeKind == NodeKind.VARIABLE) { BLangSimpleVariable variable = (BLangSimpleVariable) send.parent; if (variable.isDeclaredWithVar) { variable.type = variable.symbol.type = send.expectedType = receive.matchingSendsError; } } else if (parentNodeKind == NodeKind.ASSIGNMENT) { BLangAssignment assignment = (BLangAssignment) send.parent; if (assignment.varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BSymbol varSymbol = ((BLangSimpleVarRef) assignment.varRef).symbol; if (varSymbol != null) { send.expectedType = varSymbol.type; } } } if (receive.matchingSendsError != symTable.nilType && parentNodeKind == NodeKind.EXPRESSION_STATEMENT) { dlog.error(send.pos, DiagnosticCode.ASSIGNMENT_REQUIRED); } else { types.checkType(send.pos, receive.matchingSendsError, send.expectedType, DiagnosticCode.INCOMPATIBLE_TYPES); } types.checkType(receive, send.type, receive.type); addImplicitCast(send.type, receive); NodeKind kind = receive.parent.getKind(); if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR) { typeChecker.checkExpr((BLangExpression) receive.parent, receive.env); } receive.sendExpression = send; } private void addImplicitCast(BType actualType, BLangWorkerReceive receive) { if (receive.type != null && receive.type != symTable.semanticError) { types.setImplicitCastExpr(receive, actualType, receive.type); receive.type = actualType; } } private boolean checkNextBreakValidityInTransaction() { return !this.loopWithintransactionCheckStack.peek() && transactionCount > 0; } private boolean checkReturnValidityInTransaction() { return (this.returnWithintransactionCheckStack.empty() || 
!this.returnWithintransactionCheckStack.peek()) && transactionCount > 0;
}

/** A transaction block is valid only outside retry/aborted/committed handler blocks. */
private boolean isValidTransactionBlock() {
    return !(this.withinRetryBlock || this.withinAbortedBlock || this.withinCommittedBlock);
}

/**
 * Validates the module's {@code main} entry point: it must be public, every parameter
 * (including the rest parameter) must be anydata, and the return type must be error or nil.
 * Functions with any other name are ignored.
 */
private void validateMainFunction(BLangFunction funcNode) {
    if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
        return;
    }
    if (!Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC);
    }
    // Entry-point parameters must be representable as anydata.
    for (BLangSimpleVariable requiredParam : funcNode.requiredParams) {
        if (!requiredParam.type.isAnydata()) {
            this.dlog.error(requiredParam.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA, requiredParam.type);
        }
    }
    BLangSimpleVariable restParam = funcNode.restParam;
    if (restParam != null && !restParam.type.isAnydata()) {
        this.dlog.error(restParam.pos, DiagnosticCode.MAIN_PARAMS_SHOULD_BE_ANYDATA, restParam.type);
    }
    types.validateErrorOrNilReturn(funcNode, DiagnosticCode.MAIN_RETURN_SHOULD_BE_ERROR_OR_NIL);
}

/**
 * Validates the module {@code init} function: it must not be public, must take no
 * parameters, and must return error or nil. Attached functions and functions with other
 * names are ignored.
 */
private void validateModuleInitFunction(BLangFunction funcNode) {
    boolean notModuleInit = funcNode.attachedFunction
            || !Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcNode.name.value);
    if (notModuleInit) {
        return;
    }
    if (Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_BE_PUBLIC);
    }
    boolean hasParams = !funcNode.requiredParams.isEmpty() || funcNode.restParam != null;
    if (hasParams) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MODULE_INIT_CANNOT_HAVE_PARAMS);
    }
    types.validateErrorOrNilReturn(funcNode, DiagnosticCode.MODULE_INIT_RETURN_SHOULD_BE_ERROR_OR_NIL);
}

private boolean getIsJSONContext(BType...
arg) { if (this.isJSONContext) { return true; } for (BType type : arg) { if (types.isJSONContext(type)) { return true; } } return false; } private BType getErrorTypes(BType bType) { BType errorType = symTable.semanticError; int tag = bType.tag; if (tag == TypeTags.ERROR) { errorType = bType; } else if (tag == TypeTags.UNION) { LinkedHashSet<BType> errTypes = new LinkedHashSet<>(); Set<BType> memTypes = ((BUnionType) bType).getMemberTypes(); for (BType memType : memTypes) { if (memType.tag == TypeTags.ERROR) { errTypes.add(memType); } } errorType = errTypes.size() == 1 ? errTypes.iterator().next() : BUnionType.create(null, errTypes); } return errorType; } /** * This class contains the state machines for a set of workers. */ private static class WorkerActionSystem { public List<WorkerActionStateMachine> finshedWorkers = new ArrayList<>(); private Stack<WorkerActionStateMachine> workerActionStateMachines = new Stack<>(); private boolean hasErrors = false; public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos, BLangFunction node) { workerActionStateMachines.push(new WorkerActionStateMachine(pos, workerId, node)); } public void endWorkerActionStateMachine() { finshedWorkers.add(workerActionStateMachines.pop()); } public void addWorkerAction(BLangNode action) { this.workerActionStateMachines.peek().actions.add(action); } public WorkerActionStateMachine find(String workerId) { for (WorkerActionStateMachine worker : this.finshedWorkers) { if (worker.workerId.equals(workerId)) { return worker; } } throw new AssertionError("Reference to non existing worker " + workerId); } public boolean everyoneDone() { return this.finshedWorkers.stream().allMatch(WorkerActionStateMachine::done); } public DiagnosticPos getRootPosition() { return this.finshedWorkers.iterator().next().pos; } @Override public String toString() { return this.finshedWorkers.toString(); } public String currentWorkerId() { return workerActionStateMachines.peek().workerId; } } /** * This 
class represents a state machine to maintain the state of the send/receive * actions of a worker. */ private static class WorkerActionStateMachine { private static final String WORKER_SM_FINISHED = "FINISHED"; public int currentState; public List<BLangNode> actions = new ArrayList<>(); public DiagnosticPos pos; public String workerId; public BLangFunction node; public WorkerActionStateMachine(DiagnosticPos pos, String workerId, BLangFunction node) { this.pos = pos; this.workerId = workerId; this.node = node; } public boolean done() { return this.actions.size() == this.currentState; } public BLangNode currentAction() { return this.actions.get(this.currentState); } public boolean currentIsReceive(String sourceWorkerId) { if (this.done()) { return false; } BLangNode action = this.currentAction(); return !isWorkerSend(action) && !isWorkerSyncSend(action) && ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId); } public void next() { this.currentState++; } @Override public String toString() { if (this.done()) { return WORKER_SM_FINISHED; } else { BLangNode action = this.currentAction(); if (isWorkerSend(action)) { return ((BLangWorkerSend) action).toActionString(); } else if (isWorkerSyncSend(action)) { return ((BLangWorkerSyncSendExpr) action).toActionString(); } else { return ((BLangWorkerReceive) action).toActionString(); } } } } private void checkExperimentalFeatureValidity(ExperimentalFeatures constructName, DiagnosticPos pos) { if (enableExperimentalFeatures) { return; } dlog.error(pos, DiagnosticCode.INVALID_USE_OF_EXPERIMENTAL_FEATURE, constructName.value); } public static String generateChannelName(String source, String target) { return source + "->" + target; } /** * Experimental feature list for JBallerina 1.0.0. 
* * @since JBallerina 1.0.0 */ private enum ExperimentalFeatures { TRANSACTIONS("transaction"), LOCK("lock"), XML_ACCESS("xml access expression"), XML_ATTRIBUTES_ACCESS("xml attribute expression"), ; private String value; private ExperimentalFeatures(String value) { this.value = value; } @Override public String toString() { return value; } } }
Yes, that's a good addition.
private static void testSourceDoesNotShutdown(boolean shouldHaveReaders) throws Exception { final int parallelism = 2; FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); TestCountingSource source = new TestCountingSource(20).withoutSplitting(); UnboundedSourceWrapper<KV<Integer, Integer>, TestCountingSource.CounterMark> sourceWrapper = new UnboundedSourceWrapper<>("noReader", options, source, parallelism); StreamingRuntimeContext mock = Mockito.mock(StreamingRuntimeContext.class); if (shouldHaveReaders) { Mockito.when(mock.getIndexOfThisSubtask()).thenReturn(0); } else { Mockito.when(mock.getIndexOfThisSubtask()).thenReturn(parallelism - 1); } Mockito.when(mock.getNumberOfParallelSubtasks()).thenReturn(parallelism); Mockito.when(mock.getExecutionConfig()).thenReturn(new ExecutionConfig()); ProcessingTimeService timerService = Mockito.mock(ProcessingTimeService.class); Mockito.when(timerService.getCurrentProcessingTime()).thenReturn(Long.MAX_VALUE); Mockito.when(mock.getProcessingTimeService()).thenReturn(timerService); sourceWrapper.setRuntimeContext(mock); sourceWrapper.open(new Configuration()); SourceFunction.SourceContext sourceContext = Mockito.mock(SourceFunction.SourceContext.class); Object checkpointLock = new Object(); Mockito.when(sourceContext.getCheckpointLock()).thenReturn(checkpointLock); sourceWrapper.setSourceContext(sourceContext); sourceWrapper.open(new Configuration()); assertThat(sourceWrapper.getLocalReaders().isEmpty(), is(!shouldHaveReaders)); Thread thread = new Thread( () -> { try { sourceWrapper.run(sourceContext); } catch (Exception e) { LOG.error("Error while running UnboundedSourceWrapper", e); } }); try { thread.start(); if (!shouldHaveReaders) { while (true) { StackTraceElement[] callStack = thread.getStackTrace(); if (callStack.length >= 2 && "sleep".equals(callStack[0].getMethodName()) && "finalizeSource".equals(callStack[1].getMethodName())) { break; } Thread.sleep(10); } } 
assertThat(sourceWrapper.isRunning(), is(true)); synchronized (checkpointLock) { sourceWrapper.onProcessingTime(42); } assertThat(sourceWrapper.isRunning(), is(true)); assertThat(thread.isAlive(), is(true)); sourceWrapper.cancel(); } finally { thread.interrupt(); thread.join(); } }
thread.interrupt();
private static void testSourceDoesNotShutdown(boolean shouldHaveReaders) throws Exception { final int parallelism = 2; FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class); TestCountingSource source = new TestCountingSource(20).withoutSplitting(); UnboundedSourceWrapper<KV<Integer, Integer>, TestCountingSource.CounterMark> sourceWrapper = new UnboundedSourceWrapper<>("noReader", options, source, parallelism); StreamingRuntimeContext mock = Mockito.mock(StreamingRuntimeContext.class); if (shouldHaveReaders) { Mockito.when(mock.getIndexOfThisSubtask()).thenReturn(0); } else { Mockito.when(mock.getIndexOfThisSubtask()).thenReturn(parallelism - 1); } Mockito.when(mock.getNumberOfParallelSubtasks()).thenReturn(parallelism); Mockito.when(mock.getExecutionConfig()).thenReturn(new ExecutionConfig()); ProcessingTimeService timerService = Mockito.mock(ProcessingTimeService.class); Mockito.when(timerService.getCurrentProcessingTime()).thenReturn(Long.MAX_VALUE); Mockito.when(mock.getProcessingTimeService()).thenReturn(timerService); sourceWrapper.setRuntimeContext(mock); sourceWrapper.open(new Configuration()); SourceFunction.SourceContext sourceContext = Mockito.mock(SourceFunction.SourceContext.class); Object checkpointLock = new Object(); Mockito.when(sourceContext.getCheckpointLock()).thenReturn(checkpointLock); sourceWrapper.setSourceContext(sourceContext); sourceWrapper.open(new Configuration()); assertThat(sourceWrapper.getLocalReaders().isEmpty(), is(!shouldHaveReaders)); Thread thread = new Thread( () -> { try { sourceWrapper.run(sourceContext); } catch (Exception e) { LOG.error("Error while running UnboundedSourceWrapper", e); } }); try { thread.start(); if (!shouldHaveReaders) { while (true) { StackTraceElement[] callStack = thread.getStackTrace(); if (callStack.length >= 2 && "sleep".equals(callStack[0].getMethodName()) && "finalizeSource".equals(callStack[1].getMethodName())) { break; } Thread.sleep(10); } } 
assertThat(sourceWrapper.isRunning(), is(true)); synchronized (checkpointLock) { sourceWrapper.onProcessingTime(42); } assertThat(sourceWrapper.isRunning(), is(true)); assertThat(thread.isAlive(), is(true)); sourceWrapper.cancel(); } finally { thread.interrupt(); thread.join(1000); assertThat(thread.isAlive(), is(false)); } }
class BasicTest { /** Check serialization a {@link UnboundedSourceWrapper}. */ @Test public void testSerialization() throws Exception { final int parallelism = 1; final int numElements = 20; PipelineOptions options = PipelineOptionsFactory.create(); TestCountingSource source = new TestCountingSource(numElements); UnboundedSourceWrapper<KV<Integer, Integer>, TestCountingSource.CounterMark> flinkWrapper = new UnboundedSourceWrapper<>("stepName", options, source, parallelism); InstantiationUtil.serializeObject(flinkWrapper); } @Test(timeout = 10_000) public void testSourceWithNoReaderDoesNotShutdown() throws Exception { testSourceDoesNotShutdown(false); } @Test(timeout = 10_000) public void testSourceWithReadersDoesNotShutdown() throws Exception { testSourceDoesNotShutdown(true); } }
class BasicTest { /** Check serialization a {@link UnboundedSourceWrapper}. */ @Test public void testSerialization() throws Exception { final int parallelism = 1; final int numElements = 20; PipelineOptions options = PipelineOptionsFactory.create(); TestCountingSource source = new TestCountingSource(numElements); UnboundedSourceWrapper<KV<Integer, Integer>, TestCountingSource.CounterMark> flinkWrapper = new UnboundedSourceWrapper<>("stepName", options, source, parallelism); InstantiationUtil.serializeObject(flinkWrapper); } @Test(timeout = 10_000) public void testSourceWithNoReaderDoesNotShutdown() throws Exception { testSourceDoesNotShutdown(false); } @Test(timeout = 10_000) public void testSourceWithReadersDoesNotShutdown() throws Exception { testSourceDoesNotShutdown(true); } }
if we are matching specific scenarios (eg. a native function) in these if conditions; shall we move those into private methods for the readability. For example; then we may write `if (isNativeFunction(...)) {`
public String getSourceForFunction(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("defaultConstructor") != null && node.get("defaultConstructor") .getAsBoolean()) { return ""; } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, 
replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + 
(node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + 
join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", 
sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && 
node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && 
node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", 
sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", 
sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), 
pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && 
node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, 
"", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && 
node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", 
sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && 
!node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return 
dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return 
dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return 
dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", 
sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && 
!node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && 
node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", 
sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && 
node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && 
node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, 
false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != 
null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", 
null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && 
node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && 
!node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, 
sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && 
node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != 
null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && 
node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } }
&& node.get("returnTypeNode") != null) {
public String getSourceForFunction(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("defaultConstructor") != null && node.get("defaultConstructor") .getAsBoolean()) { return ""; } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, 
replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isConstructor") != null && node.get("isConstructor") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + 
(node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + 
join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("interface") != null && node.get("interface") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", 
sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("isStreamAction") != null && node.get("isStreamAction") .getAsBoolean() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && 
node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && 
node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", 
sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", 
sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), 
pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("lambda") != null && node.get("lambda") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && 
node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, 
"", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && 
node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", 
sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && 
!node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noVisibleReceiver") != null && node.get("noVisibleReceiver") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return 
dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return 
dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return 
dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", 
sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && 
!node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && 
node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", 
sourceGenParams.isShouldIndent()); } else if (node.get("objectOuterFunction") != null && node.get("objectOuterFunction") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar") != null && !node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("objectOuterFunctionTypeName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "::" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && 
node.get("restParameters") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("native") != null && node.get("native") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "extern" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && 
node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, 
false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != 
null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", 
null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && 
node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && 
!node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, 
sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("hasReturns") != null && node.get("hasReturns") .getAsBoolean() && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("returnTypeAnnotationAttachments") != null && node.get("returnTypeNode") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "returns" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnTypeAnnotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("returnTypeNode"), pretty, replaceLambda) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && 
node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.get("receiver") != 
null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("receiver"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && 
node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? 
w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("restParameters") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + (node.has("hasRestParams") && node.get("hasRestParams").getAsBoolean() ? w("", sourceGenParams) + "," + a("", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("restParameters"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("allParams") != null && node.get("endpointNodes") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "function" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("allParams"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("endpointNodes"), pretty, replaceLambda, "", null, false, sourceGenParams) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } }
// NOTE(review): auto-generated source-reconstruction code (Ballerina JSON AST -> source
// text); lines are machine-emitted and should not be hand-reformatted. Each
// getSourceForX method dispatches on which fields are present in the JSON node and
// concatenates fragments from helpers declared elsewhere in this file: w() / a()
// (whitespace/indent padding -- exact semantics defined elsewhere, confirm there),
// dent()/indent()/outdent() (indent-level bookkeeping via field `l`), join() (render a
// child array with a separator) and getSourceOf() (render a single child node).
// The line below contains: the class header; fields (TAB = one indent unit,
// l = current indent depth, anonTypes = cache of anonymous-type nodes keyed by name);
// the constructor; and getSourceForImport, which renders the
// `import [org/]pkg.name [as alias];` variants depending on orgName/alias presence.
class SourceGen { private static final String TAB = " "; private int l = 0; private Map<String, JsonObject> anonTypes = new HashMap<>(); public SourceGen(int l) { this.l = l; } public String getSourceForImport(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("isInternal") != null && node.get("isInternal") .getAsBoolean()) { return ""; } else if (node.get("userDefinedAlias") != null && node.get("userDefinedAlias") .getAsBoolean() && node.getAsJsonObject("orgName").get("valueWithBar") != null && !node.getAsJsonObject("orgName").get("valueWithBar").getAsString().isEmpty() && node.get("packageName") != null && node.getAsJsonObject("alias").get("valueWithBar") != null && !node.getAsJsonObject("alias").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("orgName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "/" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", false, sourceGenParams) + w(" ", sourceGenParams) + "as" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("alias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("userDefinedAlias") != null && node.get("userDefinedAlias") .getAsBoolean() && node.get("packageName") != null && node.getAsJsonObject("alias").get("valueWithBar") != null && !node.getAsJsonObject("alias").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", 
// (continuation) getSourceForImport's remaining variants: `import pkg as alias;`,
// `import org/pkg;`, and the plain `import pkg;` fallback. Then:
// getSourceForIdentifier -- emits the node's valueWithBar verbatim;
// getSourceForAbort -- emits an `abort;` statement at the current indent;
// and the start of getSourceForAction's signature (body continues on later lines).
false, sourceGenParams) + w(" ", sourceGenParams) + "as" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("alias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.getAsJsonObject("orgName").get("valueWithBar") != null && !node.getAsJsonObject("orgName").get("valueWithBar").getAsString().isEmpty() && node.get("packageName") != null) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("orgName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "/" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", false, sourceGenParams) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", false, sourceGenParams) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForIdentifier(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + node.get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } public String getSourceForAbort(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "abort" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForAction(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams 
// getSourceForAction (body): renders `action name(params) [(returnParams)] { body
// workers }` preceded by annotation/deprecated attachments. Four branches, chosen by
// field presence: (1) with markdown doc + return parameters; (2) return parameters,
// no markdown doc; (3) markdown doc, no return parameters; (4) fallback with neither.
// NOTE(review): generated dispatch -- branch order matters (most-specific first).
sourceGenParams) { if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("returnParameters") != null && node.getAsJsonArray("returnParameters").size() > 0 && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnParameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", 
sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("returnParameters") != null && node.getAsJsonArray("returnParameters").size() > 0 && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + dent(sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnParameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null 
&& node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + dent(sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", 
// End of getSourceForAction's fallback branch, then the start of
// getSourceForAnnotation: renders `annotation [<attachmentPoints>] name [typeNode];`
// variants; its first branch handles noAttachmentPoints + markdown doc + typeNode.
// NOTE(review): getSourceForAnnotation continues beyond this line.
sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForAnnotation(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("annotationAttachments") != null && 
node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("annotationAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("attachmentPoints") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("attachmentPoints") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("attachmentPoints") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, 
replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForAnnotationAttachment(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("builtin") != null && node.get("builtin") .getAsBoolean() && node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty() && node.get("expression") != null) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else if (node.get("builtin") != null && node.get("builtin") .getAsBoolean() && node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty()) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } else if (node.getAsJsonObject("packageAlias").get("valueWithBar") != null && !node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString().isEmpty() && 
node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty() && node.get("expression") != null) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ":" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else if (node.getAsJsonObject("packageAlias").get("valueWithBar") != null && !node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty()) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ":" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } else if (node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty() && node.get("expression") != null) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForArrayLiteralExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "[" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("expressions"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + "]" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForArrayType(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("isRestParam") != null && node.get("isRestParam") .getAsBoolean() && node.get("grouped") != null && node.get("grouped") .getAsBoolean() && node.get("elementType") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isRestParam") != null && node.get("isRestParam") .getAsBoolean() && node.get("elementType") != null) { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda); } else if (node.get("grouped") != null && node.get("grouped") .getAsBoolean() && node.get("elementType") != null && node.get("dimensionAsString") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda) + w("", sourceGenParams) + node.get("dimensionAsString").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", 
sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda) + w("", sourceGenParams) + node.get("dimensionAsString").getAsString() + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForArrowExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("hasParantheses") != null && node.get("hasParantheses") .getAsBoolean() && node.get("parameters") != null && node.get("expression") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else { return join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } } public String getSourceForAssignment(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + (node.has("declaredWithVar") && node.get("declaredWithVar").getAsBoolean() ? 
w("", sourceGenParams) + "var" + a(" ", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("variable"), pretty, replaceLambda) + w(" ", sourceGenParams) + "=" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForAwaitExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "await" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } public String getSourceForBinaryExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("inTemplateLiteral") != null && node.get("inTemplateLiteral") .getAsBoolean() && node.get("leftExpression") != null && node.get("operatorKind") != null && node.get("rightExpression") != null) { return w("", sourceGenParams) + "{{" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("leftExpression"), pretty, replaceLambda) + w(" ", sourceGenParams) + node.get("operatorKind").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("rightExpression"), pretty, replaceLambda) + w("", sourceGenParams) + "}}" + a("", sourceGenParams.isShouldIndent()); } else { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("leftExpression"), pretty, replaceLambda) + w(" ", sourceGenParams) + node.get("operatorKind").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("rightExpression"), pretty, replaceLambda); } } public 
String getSourceForBind(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "bind" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda) + w(" ", sourceGenParams) + "with" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("variable"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForBlock(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("isElseBlock") != null && node.get("isElseBlock") .getAsBoolean() && node.get("statements") != null) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "else" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("statements"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("statements"), pretty, replaceLambda, "", null, false, sourceGenParams); } } public String getSourceForBreak(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "break" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForBracedTupleExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("expressions"), pretty, 
replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForBuiltInRefType(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("grouped") != null && node.get("grouped") .getAsBoolean() && node.get("typeKind") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("typeKind").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else { return w("", sourceGenParams) + node.get("typeKind").getAsString() + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForCatch(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "catch" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("parameter"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForCheckExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "check" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } public String getSourceForComment(JsonObject node, boolean pretty, boolean replaceLambda, 
SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("comment").getAsString() + a("", sourceGenParams.isShouldIndent()); } public String getSourceForCompoundAssignment(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("variable"), pretty, replaceLambda) + w("", sourceGenParams) + "+=" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForConnector(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("variableDefs") != null && node.get("actions") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("variableDefs"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("variableDefs") != null && node.get("actions") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("variableDefs"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("actions") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForConnectorInitExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("connectorType") != null && node.get("expressions") != null) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "create" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("connectorType"), pretty, replaceLambda) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("expressions"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "create" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("connectorType"), pretty, replaceLambda) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } } 
public String getSourceForConstrainedType(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("type"), pretty, replaceLambda) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("constraint"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForDocumentationAttribute(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + node.get("paramType").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{{" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("documentationField").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}}" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("documentationText").getAsString() + a("", sourceGenParams.isShouldIndent()); } public String getSourceForDeprecated(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + node.get("deprecatedStart").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("documentationText").getAsString() + a("", sourceGenParams.isShouldIndent()) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); }
class SourceGen { private static final String TAB = " "; private int l = 0; private Map<String, JsonObject> anonTypes = new HashMap<>(); public SourceGen(int l) { this.l = l; } public String getSourceForImport(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("isInternal") != null && node.get("isInternal") .getAsBoolean()) { return ""; } else if (node.get("userDefinedAlias") != null && node.get("userDefinedAlias") .getAsBoolean() && node.getAsJsonObject("orgName").get("valueWithBar") != null && !node.getAsJsonObject("orgName").get("valueWithBar").getAsString().isEmpty() && node.get("packageName") != null && node.getAsJsonObject("alias").get("valueWithBar") != null && !node.getAsJsonObject("alias").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("orgName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "/" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", false, sourceGenParams) + w(" ", sourceGenParams) + "as" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("alias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("userDefinedAlias") != null && node.get("userDefinedAlias") .getAsBoolean() && node.get("packageName") != null && node.getAsJsonObject("alias").get("valueWithBar") != null && !node.getAsJsonObject("alias").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", 
false, sourceGenParams) + w(" ", sourceGenParams) + "as" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("alias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.getAsJsonObject("orgName").get("valueWithBar") != null && !node.getAsJsonObject("orgName").get("valueWithBar").getAsString().isEmpty() && node.get("packageName") != null) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("orgName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "/" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", false, sourceGenParams) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "import" + a(" ", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("packageName"), pretty, replaceLambda, "", ".", false, sourceGenParams) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForIdentifier(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + node.get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } public String getSourceForAbort(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "abort" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForAction(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams 
sourceGenParams) { if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("returnParameters") != null && node.getAsJsonArray("returnParameters").size() > 0 && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnParameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", 
sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("returnParameters") != null && node.getAsJsonArray("returnParameters").size() > 0 && node.get("body") != null && node.get("workers") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + dent(sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("returnParameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null 
&& node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("body") != null && node.get("workers") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "action" + a("", sourceGenParams.isShouldIndent()) + dent(sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", 
sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + join(node.getAsJsonArray("workers"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForAnnotation(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("annotationAttachments") != null && 
node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("noAttachmentPoints") != null && node.get("noAttachmentPoints") .getAsBoolean() && node.get("annotationAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("attachmentPoints") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("attachmentPoints") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("typeNode") != null) { return dent(sourceGenParams.isShouldIndent()) + 
join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("typeNode"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("attachmentPoints") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty()) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } else { return dent(sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("annotationAttachments"), pretty, 
replaceLambda, "", null, false, sourceGenParams) + w("", sourceGenParams) + "annotation" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("attachmentPoints"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForAnnotationAttachment(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("builtin") != null && node.get("builtin") .getAsBoolean() && node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty() && node.get("expression") != null) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else if (node.get("builtin") != null && node.get("builtin") .getAsBoolean() && node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty()) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } else if (node.getAsJsonObject("packageAlias").get("valueWithBar") != null && !node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString().isEmpty() && 
node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty() && node.get("expression") != null) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ":" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else if (node.getAsJsonObject("packageAlias").get("valueWithBar") != null && !node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString().isEmpty() && node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty()) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("packageAlias").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ":" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } else if (node.getAsJsonObject("annotationName").get("valueWithBar") != null && !node.getAsJsonObject("annotationName").get("valueWithBar").getAsString().isEmpty() && node.get("expression") != null) { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + 
getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else { return w("", sourceGenParams) + "@" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("annotationName").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForArrayLiteralExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "[" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("expressions"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + "]" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForArrayType(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("isRestParam") != null && node.get("isRestParam") .getAsBoolean() && node.get("grouped") != null && node.get("grouped") .getAsBoolean() && node.get("elementType") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("isRestParam") != null && node.get("isRestParam") .getAsBoolean() && node.get("elementType") != null) { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda); } else if (node.get("grouped") != null && node.get("grouped") .getAsBoolean() && node.get("elementType") != null && node.get("dimensionAsString") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda) + w("", sourceGenParams) + node.get("dimensionAsString").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", 
sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("elementType"), pretty, replaceLambda) + w("", sourceGenParams) + node.get("dimensionAsString").getAsString() + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForArrowExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("hasParantheses") != null && node.get("hasParantheses") .getAsBoolean() && node.get("parameters") != null && node.get("expression") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } else { return join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + "=>" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } } public String getSourceForAssignment(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + (node.has("declaredWithVar") && node.get("declaredWithVar").getAsBoolean() ? 
w("", sourceGenParams) + "var" + a(" ", sourceGenParams.isShouldIndent()) : "") + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("variable"), pretty, replaceLambda) + w(" ", sourceGenParams) + "=" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForAwaitExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "await" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } public String getSourceForBinaryExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("inTemplateLiteral") != null && node.get("inTemplateLiteral") .getAsBoolean() && node.get("leftExpression") != null && node.get("operatorKind") != null && node.get("rightExpression") != null) { return w("", sourceGenParams) + "{{" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("leftExpression"), pretty, replaceLambda) + w(" ", sourceGenParams) + node.get("operatorKind").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("rightExpression"), pretty, replaceLambda) + w("", sourceGenParams) + "}}" + a("", sourceGenParams.isShouldIndent()); } else { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("leftExpression"), pretty, replaceLambda) + w(" ", sourceGenParams) + node.get("operatorKind").getAsString() + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("rightExpression"), pretty, replaceLambda); } } public 
String getSourceForBind(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "bind" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda) + w(" ", sourceGenParams) + "with" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("variable"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForBlock(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("isElseBlock") != null && node.get("isElseBlock") .getAsBoolean() && node.get("statements") != null) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "else" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("statements"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("statements"), pretty, replaceLambda, "", null, false, sourceGenParams); } } public String getSourceForBreak(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "break" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForBracedTupleExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("expressions"), pretty, 
replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForBuiltInRefType(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("grouped") != null && node.get("grouped") .getAsBoolean() && node.get("typeKind") != null) { return w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("typeKind").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else { return w("", sourceGenParams) + node.get("typeKind").getAsString() + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForCatch(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "catch" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("parameter"), pretty, replaceLambda) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("body"), pretty, replaceLambda) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForCheckExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + "check" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda); } public String getSourceForComment(JsonObject node, boolean pretty, boolean replaceLambda, 
SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("comment").getAsString() + a("", sourceGenParams.isShouldIndent()); } public String getSourceForCompoundAssignment(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("variable"), pretty, replaceLambda) + w("", sourceGenParams) + "+=" + a(" ", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("expression"), pretty, replaceLambda) + w("", sourceGenParams) + ";" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForConnector(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("variableDefs") != null && node.get("actions") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("variableDefs"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("variableDefs") != null && node.get("actions") != null) { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("variableDefs"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else if (node.get("markdownDocumentationAttachment") != null && node.get("annotationAttachments") != null && node.get("deprecatedAttachments") != null && node.getAsJsonObject("name").get("valueWithBar") != null && !node.getAsJsonObject("name").get("valueWithBar").getAsString().isEmpty() && node.get("parameters") != null && node.get("actions") != null) { return dent(sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("markdownDocumentationAttachment"), pretty, replaceLambda) + join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } else { return join(node.getAsJsonArray("annotationAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + join(node.getAsJsonArray("deprecatedAttachments"), pretty, replaceLambda, "", null, false, sourceGenParams) + (node.has("public") && node.get("public").getAsBoolean() ? 
w("", sourceGenParams) + "public" + a(" ", sourceGenParams.isShouldIndent()) : "") + dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "connector" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + node.getAsJsonObject("name").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("parameters"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()) + w(" ", sourceGenParams) + "{" + a("", sourceGenParams.isShouldIndent()) + indent() + join(node.getAsJsonArray("actions"), pretty, replaceLambda, "", null, false, sourceGenParams) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); } } public String getSourceForConnectorInitExpr(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { if (node.get("connectorType") != null && node.get("expressions") != null) { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "create" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("connectorType"), pretty, replaceLambda) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + join(node.getAsJsonArray("expressions"), pretty, replaceLambda, "", ",", false, sourceGenParams) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } else { return dent(sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "create" + a("", sourceGenParams.isShouldIndent()) + a(" ", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("connectorType"), pretty, replaceLambda) + w("", sourceGenParams) + "(" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + ")" + a("", sourceGenParams.isShouldIndent()); } } 
public String getSourceForConstrainedType(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("type"), pretty, replaceLambda) + w("", sourceGenParams) + "<" + a("", sourceGenParams.isShouldIndent()) + a("", sourceGenParams.isShouldIndent()) + getSourceOf(node.getAsJsonObject("constraint"), pretty, replaceLambda) + w("", sourceGenParams) + ">" + a("", sourceGenParams.isShouldIndent()); } public String getSourceForDocumentationAttribute(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + node.get("paramType").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "{{" + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.getAsJsonObject("documentationField").get("valueWithBar").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}}" + a(" ", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("documentationText").getAsString() + a("", sourceGenParams.isShouldIndent()); } public String getSourceForDeprecated(JsonObject node, boolean pretty, boolean replaceLambda, SourceGenParams sourceGenParams) { return w("", sourceGenParams) + node.get("deprecatedStart").getAsString() + a("", sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + node.get("documentationText").getAsString() + a("", sourceGenParams.isShouldIndent()) + outdent(node, sourceGenParams.isShouldIndent()) + w("", sourceGenParams) + "}" + a("", sourceGenParams.isShouldIndent()); }
the purpose of `getCreateDocumentRequest` is just to create the request. retry-policy interaction should happen outside. why are we moving this?
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.PUT); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); }
return getStoreProxy(request).processMessage(request);
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.PUT); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); }
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider { private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean connectionSharingAcrossClientsEnabled; private CosmosKeyCredential cosmosKeyCredential; private TokenResolver tokenResolver; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final HttpClient reactorHttpClient; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, TokenResolver tokenResolver, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled); this.tokenResolver = tokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, 
StringUtils.EMPTY, StringUtils.EMPTY, false); if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled) { logger.info( "Initializing DocumentClient with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; 
this.cosmosKeyCredential = cosmosKeyCredential; if (this.cosmosKeyCredential != null) { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)){ this.cosmosKeyCredential = new CosmosKeyCredential(this.masterKeyOrResourceToken); hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(); } boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; } private void initializeGatewayConfigurationReader() { String resourceToken; if(this.tokenResolver != null) { resourceToken = this.tokenResolver.getAuthorizationToken(RequestVerb.GET, "", CosmosResourceType.System, null); } else 
if(!this.hasAuthKeyResourceToken && this.authorizationTokenProvider == null) { resourceToken = this.firstResourceTokenFromPermissionFeed; } else { assert this.masterKeyOrResourceToken != null || this.cosmosKeyCredential != null; resourceToken = this.masterKeyOrResourceToken; } this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.serviceEndpoint, this.hasAuthKeyResourceToken, resourceToken, this.connectionPolicy, this.authorizationTokenProvider, this.reactorHttpClient); DatabaseAccount databaseAccount = this.gatewayConfigurationReader.initializeReaderAsync().block(); this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); this.globalEndpointManager.refreshLocationAsync(databaseAccount, false).block(); } public void init() { this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); this.collectionCache = new RxClientCollectionCache(this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } } private void initializeDirectConnectivity() { this.storeClientFactory = new StoreClientFactory( this.configs, this.connectionPolicy.getRequestTimeoutInMillis() / 1000, 0, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled ); this.addressResolver = new GlobalAddressResolver( this.reactorHttpClient, this.globalEndpointManager, 
this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { return new RxGatewayStoreModel(sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient); } private HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeoutInMillis(this.connectionPolicy.getIdleConnectionTimeoutInMillis()) .withPoolSize(this.connectionPolicy.getMaxPoolSize()) .withHttpProxy(this.connectionPolicy.getProxy()) .withRequestTimeoutInMillis(this.connectionPolicy.getRequestTimeoutInMillis()); if (connectionSharingAcrossClientsEnabled) { return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig); } else { return HttpClient.createFixed(httpClientConfig); } } private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient( this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, false ); this.storeModel = new ServerStoreModel(storeClient); } @Override 
public URI getServiceEndpoint() { return this.serviceEndpoint; } @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, database, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. 
databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Database>> readDatabases(FeedOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } private String parentResourceLinkToQueryLink(String parentResouceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResouceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResouceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResouceLink, Paths.USERS_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResouceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResouceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResouceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResouceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResouceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, FeedOptions options, Class<T> klass, ResourceType 
resourceTypeEnum) { String queryResourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this); Flux<? extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createDocumentQueryExecutionContextAsync(queryClient, resourceTypeEnum, klass, sqlQuery , options, queryResourceLink, false, activityId); return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync); } @Override public Flux<FeedResponse<Database>> queryDatabases(String query, FeedOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.DocumentCollection, path, collection, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.DocumentCollection, path, collection, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.DELETE); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.GET); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.GET); return gatewayProxy.processMessage(request); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.POST); return this.getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); } @Override public 
Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) {
    // Wrap in inlineIfPossibleAsObs so the retry policy governs re-subscription.
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance);
}

// Builds and issues the Read request for a collection; synchronous argument
// failures are surfaced as Mono.error rather than thrown to the subscriber.
private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink);
        String path = Utils.joinPath(collectionLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options);
        if (retryPolicyInstance != null){
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Reads all collections of a database as a paged feed.
@Override
public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, FeedOptions options) {
    if (StringUtils.isEmpty(databaseLink)) {
        throw new IllegalArgumentException("databaseLink");
    }
    return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT));
}

// Query overload taking a raw SQL string; delegates to the SqlQuerySpec overload.
@Override
public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, FeedOptions options) {
    return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection);
}

@Override public Flux<FeedResponse<DocumentCollection>>
queryCollections(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection);
}

// Serializes stored-procedure arguments to a JSON array literal.
// JsonSerializable args use their own toJson(); everything else goes through
// the shared Jackson mapper. Serialization failure is a caller error.
private static String serializeProcedureParams(Object[] objectArray) {
    String[] stringArray = new String[objectArray.length];
    for (int i = 0; i < objectArray.length; ++i) {
        Object object = objectArray[i];
        if (object instanceof JsonSerializable) {
            stringArray[i] = ((JsonSerializable) object).toJson();
        } else {
            try {
                stringArray[i] = mapper.writeValueAsString(object);
            } catch (IOException e) {
                throw new IllegalArgumentException("Can't serialize the object into the json string", e);
            }
        }
    }
    return String.format("[%s]", StringUtils.join(stringArray, ","));
}

// Rejects resource ids containing characters that are illegal in resource
// links ('/', '\', '?', '#') or ending with a space.
// FIX: the '#' check and the closing "') != -1) {" of the condition had been
// dropped, leaving the method syntactically broken; restored here.
private static void validateResource(Resource resource) {
    if (!StringUtils.isEmpty(resource.getId())) {
        if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 ||
                resource.getId().indexOf('?') != -1 || resource.getId().indexOf('#') != -1) {
            throw new IllegalArgumentException("Id contains illegal chars.");
        }
        if (resource.getId().endsWith(" ")) {
            throw new IllegalArgumentException("Id ends with a space.");
        }
    }
}

// Translates RequestOptions into wire headers. Options-derived headers are
// applied after custom headers so explicit options win on conflicts.
private Map<String, String> getRequestHeaders(RequestOptions options) {
    Map<String, String> headers = new HashMap<>();
    if (this.useMultipleWriteLocations) {
        headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString());
    }
    if (consistencyLevel != null) {
        headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString());
    }
    if (options == null) {
        return headers;
    }
    Map<String, String> customOptions = options.getHeaders();
    if (customOptions != null) {
        headers.putAll(customOptions);
    }
    if (options.getAccessCondition() != null) {
        if (options.getAccessCondition().getType() == AccessConditionType.IF_MATCH) {
            headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getAccessCondition().getCondition());
        } else {
            headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH,
options.getAccessCondition().getCondition()); } } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.isPopulateQuotaInfo()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } return headers; } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsString, document, options, collectionValueHolder.v); return request; }); } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsString, document, options, collectionValueHolder.v); return request; }); } private void addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = BridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsString != null) { Callable<PartitionKeyInternal> extractPartitionKeyCallable = () -> { CosmosItemProperties cosmosItemProperties; if (objectDoc instanceof CosmosItemProperties) { cosmosItemProperties = (CosmosItemProperties) objectDoc; } else { cosmosItemProperties = new CosmosItemProperties(contentAsString); } return extractPartitionKeyValueFromDocument(cosmosItemProperties, partitionKeyDefinition); }; SerializationDiagnosticsContext 
serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics); partitionKeyInternal = serializationDiagnosticsContext.getResource(extractPartitionKeyCallable, SerializationDiagnosticsContext.SerializationType.PartitionKeyFetchSerialization); } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } private static PartitionKeyInternal extractPartitionKeyValueFromDocument( CosmosItemProperties document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = document.getObjectByPath(parts); if (value == null || value.getClass() == ObjectNode.class) { value = BridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } } return null; } private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } String content = toJsonString(document, mapper); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(operationType, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } private void populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null || this.tokenResolver != null || this.cosmosKeyCredential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.tokenResolver != 
null) { return this.tokenResolver.getAuthorizationToken(requestVerb, resourceName, this.resolveCosmosResourceType(resourceType), properties != null ? Collections.unmodifiableMap(properties) : null); } else if (cosmosKeyCredential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { try { return CosmosResourceType.valueOf(resourceType.toString()); } catch (IllegalArgumentException e) { return CosmosResourceType.System; } } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy retryPolicy) { populateHeaders(request, RequestVerb.POST); RxStoreModel storeProxy = this.getStoreProxy(request); if(request.requestContext != null && retryPolicy.getRetryCount() > 0) { retryPolicy.updateEndTime(); request.requestContext.updateRetryContext(retryPolicy, true); } return storeProxy.processMessage(request); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.POST); Map<String, String> headers = request.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if(request.requestContext != null && 
documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); } @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(requestRetryPolicy, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> { return create(request, requestRetryPolicy); }); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(retryPolicyInstance, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> { return upsert(request, retryPolicyInstance); }); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(Document 
document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options); String content = toJsonString(document, mapper); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> { return replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class));} ); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { return this.delete(req, retryPolicyInstance) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));}); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<Pair<String, PartitionKey>> itemKeyList, String collectionLink, FeedOptions options, Class<T> klass) { RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be 
null"); } Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), collection.getResourceId(), null, null); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemKeyList .forEach(stringPartitionKeyPair -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString(BridgeInternal .getPartitionKeyInternal(stringPartitionKeyPair .getRight()), collection .getPartitionKey()); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<Pair<String, PartitionKey>> list = new ArrayList<>(); list.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, list); } else { List<Pair<String, PartitionKey>> pairs = partitionRangeItemKeyMap.get(range); pairs.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, pairs); } }); Set<PartitionKeyRange> partitionKeyRanges = partitionRangeItemKeyMap.keySet(); List<PartitionKeyRange> ranges = new ArrayList<>(); ranges.addAll(partitionKeyRanges); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); String sqlQuery = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; return createReadManyQuery(collectionLink, new SqlQuerySpec(sqlQuery), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<T>(); HashMap<String, String> headers = 
new HashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> document.toObject(klass)).collect(Collectors.toList())); } headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<Pair<String, PartitionKey>>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); SqlParameterList parameters = new SqlParameterList(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i); String idValue = pair.getLeft(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = pair.getRight(); Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); 
queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); }

// Builds the readMany query for the general case (partition key path != id):
// SELECT * FROM c WHERE ( (c.id = @p1 AND c[pk] = @p0) OR ... ).
// Each pair consumes two parameter slots: @param(2i) = pk value, @param(2i+1) = id.
private SqlQuerySpec createReadManyQuerySpec(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) {
    StringBuilder queryStringBuilder = new StringBuilder();
    SqlParameterList parameters = new SqlParameterList();
    queryStringBuilder.append("SELECT * FROM c WHERE ( ");
    for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
        Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);
        PartitionKey pkValueAsPartitionKey = pair.getRight();
        Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);
        String pkParamName = "@param" + (2 * i);
        parameters.add(new SqlParameter(pkParamName, pkValue));
        String idValue = pair.getLeft();
        String idParamName = "@param" + (2 * i + 1);
        parameters.add(new SqlParameter(idParamName, idValue));
        queryStringBuilder.append("(");
        queryStringBuilder.append("c.id = ");
        queryStringBuilder.append(idParamName);
        queryStringBuilder.append(" AND ");
        queryStringBuilder.append(" c");
        queryStringBuilder.append(partitionKeySelector);
        queryStringBuilder.append((" = "));
        queryStringBuilder.append(pkParamName);
        queryStringBuilder.append(" )");
        if (i < idPartitionKeyPairList.size() - 1) {
            queryStringBuilder.append(" OR ");
        }
    }
    queryStringBuilder.append(" )");
    return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
}

// Turns a partition key definition's paths into a bracketed selector, e.g.
// "/pk" -> ["pk"], appended to "c" when building the WHERE clause above.
// NOTE(review): replacing '"' with a lone '\\' (rather than '\\"') looks like
// an escaping bug for pk paths containing quotes — TODO confirm intent.
private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) {
    return partitionKeyDefinition.getPaths()
        .stream()
        .map(pathPart -> StringUtils.substring(pathPart, 1))        // drop leading '/'
        .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\"))
        .map(part -> "[\"" + part + "\"]")
        .collect(Collectors.joining());
}

// Formats a positional query parameter name. (Name has a typo: "Curent";
// kept as-is because renaming would touch callers outside this view.)
private String getCurentParamName(int paramCnt){
    return "@param" + paramCnt;
}

private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery(
String parentResourceLink, SqlQuerySpec sqlQuery,
        FeedOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection,
        Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) {
        // Fresh activity id correlates all fan-out requests of this read-many call.
        UUID activityId = Utils.randomUUID();
        IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this);
        Flux<? extends IDocumentQueryExecutionContext<T>> executionContext =
            DocumentQueryExecutionContextFactory.createReadManyQueryAsync(queryClient, collection.getResourceId(),
                sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId,
                klass, resourceTypeEnum);
        return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync);
    }

    // Query documents with a raw SQL string; delegates to the SqlQuerySpec overload.
    @Override
    public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, FeedOptions options) {
        return queryDocuments(collectionLink, new SqlQuerySpec(query), options);
    }

    // Adapts this client to the IDocumentQueryClient interface consumed by the
    // query pipeline. All members delegate to RxDocumentClientImpl state; the
    // rxDocumentClientImpl parameter itself is unused.
    private IDocumentQueryClient DocumentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl) {

        return new IDocumentQueryClient () {

            @Override
            public RxCollectionCache getCollectionCache() {
                return RxDocumentClientImpl.this.collectionCache;
            }

            @Override
            public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
                return RxDocumentClientImpl.this.partitionKeyRangeCache;
            }

            @Override
            public IRetryPolicyFactory getResetSessionTokenRetryPolicy() {
                return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy;
            }

            // Account-level default consistency, as reported by the gateway.
            @Override
            public ConsistencyLevel getDefaultConsistencyLevelAsync() {
                return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel();
            }

            // Consistency explicitly requested when this client was built (may be null).
            @Override
            public ConsistencyLevel getDesiredConsistencyLevelAsync() {
                return RxDocumentClientImpl.this.consistencyLevel;
            }

            @Override
            public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) {
                return RxDocumentClientImpl.this.query(request).single();
            }

            @Override
            public QueryCompatibilityMode getQueryCompatibilityMode() {
                return QueryCompatibilityMode.Default;
            }

            // NOTE(review): returns null — apparently not exercised by the query
            // paths that use this adapter; confirm before relying on it.
            @Override
            public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) {
                return null;
            }
        };
    }

    @Override
    public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec,
            FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document);
    }

    // Reads the collection's change feed as pages of documents.
    @Override
    public Flux<FeedResponse<Document>> queryDocumentChangeFeed(final String collectionLink,
            final ChangeFeedOptions changeFeedOptions) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<Document>(this,
                ResourceType.Document, Document.class, collectionLink, changeFeedOptions);
        return changeFeedQueryImpl.executeAsync();
    }

    // Enumerates the physical partition-key ranges backing the collection.
    @Override
    public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink,
            FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class,
                Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT));
    }

    // Builds a service request for a stored procedure under the given collection;
    // validates the arguments and the resource before constructing the request.
    private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure,
            RequestOptions options, OperationType operationType) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        if (storedProcedure == null) {
            throw new IllegalArgumentException("storedProcedure");
        }

        validateResource(storedProcedure);

        String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
                ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options);

        return request;
    }

    // Builds a service request for a user-defined function under the given collection.
    private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink,
UserDefinedFunction udf, RequestOptions options, OperationType operationType) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        if (udf == null) {
            throw new IllegalArgumentException("udf");
        }

        validateResource(udf);

        String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
                ResourceType.UserDefinedFunction, path, udf, requestHeaders, options);

        return request;
    }

    // Creates a stored procedure; retries per the session-token-reset retry policy.
    @Override
    public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink,
            StoredProcedure storedProcedure, RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink,
                storedProcedure, options, requestRetryPolicy), requestRetryPolicy);
    }

    // Single attempt of createStoredProcedure; synchronous failures (e.g. bad
    // arguments) are surfaced as a Mono error rather than thrown.
    private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink,
            StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]",
                    collectionLink, storedProcedure.getId());
            RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options,
                    OperationType.Create);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.create(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Upserts (creates or replaces) a stored procedure.
    @Override
    public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink,
            StoredProcedure storedProcedure, RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink,
                storedProcedure, options, requestRetryPolicy), requestRetryPolicy);
    }

    // Single attempt of upsertStoredProcedure.
    private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink,
            StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]",
                    collectionLink, storedProcedure.getId());
            RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options,
                    OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Replaces an existing stored procedure in place (addressed by its self link).
    @Override
    public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure,
            RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure,
                options, requestRetryPolicy), requestRetryPolicy);
    }

    // Single attempt of replaceStoredProcedure.
    private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            if (storedProcedure == null) {
                throw new IllegalArgumentException("storedProcedure");
            }
            logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId());

            RxDocumentClientImpl.validateResource(storedProcedure);

            String path = Utils.joinPath(storedProcedure.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                    ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Deletes a stored procedure by link.
    @Override
    public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink,
            RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink,
                options, requestRetryPolicy), requestRetryPolicy);
    }

    // Single attempt of deleteStoredProcedure.
    private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            if (StringUtils.isEmpty(storedProcedureLink)) {
                throw new IllegalArgumentException("storedProcedureLink");
            }

            logger.debug("Deleting a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                    ResourceType.StoredProcedure, path, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads a stored procedure by link.
    @Override
    public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink,
            RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink,
                options, retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of readStoredProcedure.
    private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            if (StringUtils.isEmpty(storedProcedureLink)) {
                throw new IllegalArgumentException("storedProcedureLink");
            }

            logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                    ResourceType.StoredProcedure, path, requestHeaders, options);

            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads all stored procedures in the collection as a paged feed.
    @Override
    public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, FeedOptions options) {

        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class,
                Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT));
    }

    @Override
    public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query,
            FeedOptions options) {
        return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink,
            SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure);
    }

    // Executes a stored procedure with default request options.
    @Override
    public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink,
            Object[] procedureParams) {
        return this.executeStoredProcedure(storedProcedureLink, null, procedureParams);
    }

    // Executes a stored procedure, retrying per the session-token-reset policy.
    @Override
    public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink,
            RequestOptions options, Object[] procedureParams) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return
ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink,
                options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy);
    }

    // Single attempt of executeStoredProcedure: serializes the parameters into the
    // request body, resolves partition-key information, then issues an
    // ExecuteJavaScript operation and captures the resulting session token.
    private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink,
            RequestOptions options, Object[] procedureParams, DocumentClientRetryPolicy retryPolicy) {

        try {
            logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);

            Map<String, String> requestHeaders = getRequestHeaders(options);
            requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ExecuteJavaScript,
                    ResourceType.StoredProcedure, path,
                    procedureParams != null ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "",
                    requestHeaders, options);

            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);
            return reqObs.flatMap(req -> create(request, retryPolicy)
                    .map(response -> {
                        this.captureSessionToken(request, response);
                        return toStoredProcedureResponse(response);
                    }));

        } catch (Exception e) {
            logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Creates a trigger under the given collection.
    @Override
    public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger,
            RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger,
                options, retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of createTrigger.
    private Mono<ResourceResponse<Trigger>> createTriggerInternal(String collectionLink, Trigger trigger,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]",
                    collectionLink, trigger.getId());
            RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options,
                    OperationType.Create);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.create(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Upserts (creates or replaces) a trigger.
    @Override
    public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger,
            RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger,
                options, retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of upsertTrigger.
    private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]",
                    collectionLink, trigger.getId());
            RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options,
                    OperationType.Upsert);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Builds a create/upsert service request for a trigger under the collection;
    // validates the arguments and the resource before constructing it.
    private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger,
            RequestOptions options, OperationType operationType) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        if (trigger == null) {
            throw new IllegalArgumentException("trigger");
        }

        RxDocumentClientImpl.validateResource(trigger);

        String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Trigger,
                path, trigger, requestHeaders, options);

        return request;
    }

    // Replaces an existing trigger in place (addressed by its self link).
    @Override
    public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of replaceTrigger.
    private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (trigger == null) {
                throw new IllegalArgumentException("trigger");
            }

            logger.debug("Replacing a Trigger. trigger id [{}]", trigger.getId());
            RxDocumentClientImpl.validateResource(trigger);

            String path = Utils.joinPath(trigger.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                    ResourceType.Trigger, path, trigger, requestHeaders, options);

            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Deletes a trigger by link.
    @Override
    public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of deleteTrigger.
    private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(triggerLink)) {
                throw new IllegalArgumentException("triggerLink");
            }

            logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink);
            String path = Utils.joinPath(triggerLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                    ResourceType.Trigger, path, requestHeaders, options);

            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads a trigger by link.
    @Override
    public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of readTrigger.
    private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(triggerLink)) {
                throw new IllegalArgumentException("triggerLink");
            }

            logger.debug("Reading a Trigger. triggerLink [{}]", triggerLink);
            String path = Utils.joinPath(triggerLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                    ResourceType.Trigger, path, requestHeaders, options);

            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads all triggers in the collection as a paged feed.
    @Override
    public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, FeedOptions options) {

        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        return readFeed(options, ResourceType.Trigger, Trigger.class,
                Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT));
    }

    @Override
    public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, FeedOptions options) {
        return queryTriggers(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec,
            FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger);
    }

    // Creates a user-defined function under the given collection.
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink,
            UserDefinedFunction udf, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink,
                udf, options, retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of createUserDefinedFunction.
    private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink,
            UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]",
                    collectionLink, udf.getId());
            RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options,
                    OperationType.Create);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.create(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, UserDefinedFunction.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Upserts (creates or replaces) a user-defined function.
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink,
            UserDefinedFunction udf, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink,
                udf, options, retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of upsertUserDefinedFunction.
    private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink,
            UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a UserDefinedFunction. collectionLink [{}], udf id [{}]",
                    collectionLink, udf.getId());
            RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options,
                    OperationType.Upsert);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, UserDefinedFunction.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Replaces an existing user-defined function (addressed by its self link).
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf,
            RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of replaceUserDefinedFunction.
    private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (udf == null) {
                throw new IllegalArgumentException("udf");
            }

            logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId());
            validateResource(udf);

            String path = Utils.joinPath(udf.getSelfLink(), null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                    ResourceType.UserDefinedFunction, path, udf, requestHeaders, options);

            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, UserDefinedFunction.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Deletes a user-defined function by link.
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink,
            RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of deleteUserDefinedFunction.
    private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(udfLink)) {
                throw new IllegalArgumentException("udfLink");
            }

            logger.debug("Deleting a UserDefinedFunction. udfLink [{}]", udfLink);
            String path = Utils.joinPath(udfLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                    ResourceType.UserDefinedFunction, path, requestHeaders, options);

            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, UserDefinedFunction.class));

        } catch (Exception e) {
            logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads a user-defined function by link.
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink,
            RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of readUserDefinedFunction.
    private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(udfLink)) {
                throw new IllegalArgumentException("udfLink");
            }

            logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink);
            String path = Utils.joinPath(udfLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                    ResourceType.UserDefinedFunction, path, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, UserDefinedFunction.class));

        } catch (Exception e) {
            logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads all user-defined functions in the collection as a paged feed.
    @Override
    public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink,
            FeedOptions options) {

        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class,
                Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT));
    }

    @Override
    public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink,
            String query, FeedOptions options) {
        return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink,
            SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class,
                ResourceType.UserDefinedFunction);
    }

    // Reads a conflict resource by link.
    @Override
    public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of readConflict; resolves partition-key information first.
    private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options,
DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(conflictLink)) {
                throw new IllegalArgumentException("conflictLink");
            }

            logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink);
            String path = Utils.joinPath(conflictLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                    ResourceType.Conflict, path, requestHeaders, options);

            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);

            return reqObs.flatMap(req -> {
                if (retryPolicyInstance != null) {
                    retryPolicyInstance.onBeforeSendRequest(request);
                }
                return this.read(request, retryPolicyInstance)
                        .map(response -> toResourceResponse(response, Conflict.class));
            });

        } catch (Exception e) {
            logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads all conflicts in the collection as a paged feed.
    @Override
    public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, FeedOptions options) {

        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        return readFeed(options, ResourceType.Conflict, Conflict.class,
                Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT));
    }

    @Override
    public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, FeedOptions options) {
        return queryConflicts(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec,
            FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict);
    }

    // Deletes a conflict resource by link.
    @Override
    public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of deleteConflict; resolves partition-key information first.
    private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(conflictLink)) {
                throw new IllegalArgumentException("conflictLink");
            }

            logger.debug("Deleting a Conflict. conflictLink [{}]", conflictLink);
            String path = Utils.joinPath(conflictLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                    ResourceType.Conflict, path, requestHeaders, options);

            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);

            return reqObs.flatMap(req -> {
                if (retryPolicyInstance != null) {
                    retryPolicyInstance.onBeforeSendRequest(request);
                }

                return this.delete(request, retryPolicyInstance)
                        .map(response -> toResourceResponse(response, Conflict.class));
            });

        } catch (Exception e) {
            logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Creates a user under the given database.
    @Override
    public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options,
                documentClientRetryPolicy), documentClientRetryPolicy);
    }

    // Single attempt of createUser.
    // NOTE(review): unlike the other *Internal methods, this one never calls
    // onBeforeSendRequest on the retry policy — confirm whether that is intentional.
    private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options,
            DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId());
            RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create);
            return this.create(request, documentClientRetryPolicy)
                    .map(response -> toResourceResponse(response, User.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Upserts (creates or replaces) a user.
    @Override
    public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of upsertUser.
    private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a User. databaseLink [{}], user id [{}]", databaseLink, user.getId());
            RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, User.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Builds a create/upsert service request for a user under the database;
    // validates the arguments and the resource before constructing it.
    private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options,
            OperationType operationType) {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }
        if (user == null) {
            throw new IllegalArgumentException("user");
        }

        RxDocumentClientImpl.validateResource(user);

        String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request =
                RxDocumentServiceRequest.create(operationType, ResourceType.User, path, user, requestHeaders,
                        options);

        return request;
    }

    // Replaces an existing user in place (addressed by its self link).
    @Override
    public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of replaceUser.
    private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (user == null) {
                throw new IllegalArgumentException("user");
            }
            logger.debug("Replacing a User. user id [{}]", user.getId());
            RxDocumentClientImpl.validateResource(user);

            String path = Utils.joinPath(user.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                    ResourceType.User, path, user, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, User.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Deletes a user by link.
    // NOTE(review): unlike the sibling CRUD entry points, this method carries no
    // @Override annotation in the original source — confirm against the interface.
    public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of deleteUser.
    private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {

        try {
            if (StringUtils.isEmpty(userLink)) {
                throw new IllegalArgumentException("userLink");
            }
            logger.debug("Deleting a User. userLink [{}]", userLink);
            String path = Utils.joinPath(userLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                    ResourceType.User, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance)
                    .map(response -> toResourceResponse(response, User.class));

        } catch (Exception e) {
            logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads a user by link.
    @Override
    public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options,
                retryPolicyInstance), retryPolicyInstance);
    }

    // Single attempt of readUser.
    private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(userLink)) {
                throw new IllegalArgumentException("userLink");
            }
            logger.debug("Reading a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, FeedOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId());
            RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create);
            return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Permission.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Creates the permission if absent, replaces it otherwise.
    @Override
    public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission,
                                                               RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission,
                                                                        RequestOptions options,
                                                                        DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a Permission. userLink [{}], permission id [{}]", userLink, permission.getId());
            RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Builds a permission request for Create/Upsert, validating arguments and the resource itself.
    private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission,
                                                          RequestOptions options, OperationType operationType) {
        if (StringUtils.isEmpty(userLink)) {
            throw new IllegalArgumentException("userLink");
        }
        if (permission == null) {
            throw new IllegalArgumentException("permission");
        }

        RxDocumentClientImpl.validateResource(permission);

        String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
            ResourceType.Permission, path, permission, requestHeaders, options);
        return request;
    }

    @Override
    public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> replacePermissionInternal(permission, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options,
                                                                         DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (permission == null) {
                throw new IllegalArgumentException("permission");
            }
            logger.debug("Replacing a Permission. permission id [{}]", permission.getId());
            RxDocumentClientImpl.validateResource(permission);
            // Replace addresses the resource by its self-link rather than a caller-supplied link.
            String path = Utils.joinPath(permission.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.Permission, path, permission, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    @Override
    public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> deletePermissionInternal(permissionLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options,
                                                                        DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(permissionLink)) {
                throw new IllegalArgumentException("permissionLink");
            }
            logger.debug("Deleting a Permission. permissionLink [{}]", permissionLink);
            String path = Utils.joinPath(permissionLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.Permission, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));

        } catch (Exception e) {
            logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    @Override
    public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> readPermissionInternal(permissionLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options,
                                                                      DocumentClientRetryPolicy retryPolicyInstance ) {
        try {
            if (StringUtils.isEmpty(permissionLink)) {
                throw new IllegalArgumentException("permissionLink");
            }
            logger.debug("Reading a Permission. permissionLink [{}]", permissionLink);
            String path = Utils.joinPath(permissionLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Permission, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));

        } catch (Exception e) {
            logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Reads the feed of permissions under the given user link.
    @Override
    public Flux<FeedResponse<Permission>> readPermissions(String userLink, FeedOptions options) {

        if (StringUtils.isEmpty(userLink)) {
            throw new IllegalArgumentException("userLink");
        }

        return readFeed(options, ResourceType.Permission, Permission.class,
            Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT));
    }

    @Override
    public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, FeedOptions options) {
        return queryPermissions(userLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission);
    }

    // Replaces (updates) an offer; offers carry no RequestOptions and are addressed by self-link.
    @Override
    public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> replaceOfferInternal(offer, documentClientRetryPolicy),
            documentClientRetryPolicy);
    }

    private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer,
                                                               DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            if (offer == null) {
                throw new IllegalArgumentException("offer");
            }
            logger.debug("Replacing an Offer. offer id [{}]", offer.getId());
            RxDocumentClientImpl.validateResource(offer);

            String path = Utils.joinPath(offer.getSelfLink(), null);
            // Offers take no headers and no options on the wire request.
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.Offer, path, offer, null, null);
            return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    @Override
    public Mono<ResourceResponse<Offer>> readOffer(String offerLink) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> readOfferInternal(offerLink, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink,
                                                            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(offerLink)) {
                throw new IllegalArgumentException("offerLink");
            }
            logger.debug("Reading an Offer. offerLink [{}]", offerLink);
            String path = Utils.joinPath(offerLink, null);
            // Cast disambiguates the overload taking a header map (no headers here).
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Offer, path, (HashMap<String, String>)null, null);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class));

        } catch (Exception e) {
            logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    @Override
    public Flux<FeedResponse<Offer>> readOffers(FeedOptions options) {
        return readFeed(options, ResourceType.Offer, Offer.class,
            Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null));
    }

    /**
     * Pages through a feed of resources that live under a collection, resolving the
     * collection and partition key information for every page request before sending it.
     * A continuation token and page size are threaded through each request's headers.
     */
    private <T extends Resource> Flux<FeedResponse<T>> readFeedCollectionChild(FeedOptions options,
                                                                               ResourceType resourceType,
                                                                               Class<T> klass, String resourceLink) {
        if (options == null) {
            options = new FeedOptions();
        }

        // -1 means "no page-size limit" for the paginator.
        int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1;

        final FeedOptions finalFeedOptions = options;
        RequestOptions requestOptions = new RequestOptions();
        requestOptions.setPartitionKey(options.partitionKey());
        BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> {
            Map<String, String> requestHeaders = new HashMap<>();
            if (continuationToken != null) {
                requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken);
            }
            requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize));
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed,
                resourceType, resourceLink, requestHeaders, finalFeedOptions);
            return request;
        };

        Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> {
            return ObservableHelper.inlineIfPossibleAsObs(() -> {
                // Resolve the owning collection, then add partition-key headers, then read.
                Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
                    this.collectionCache.resolveCollectionAsync(null, request);
                Mono<RxDocumentServiceRequest> requestObs =
                    this.addPartitionKeyInformation(request, null, null, requestOptions, collectionObs);

                return requestObs.flatMap(req -> this.readFeed(req)
                    .map(response -> toFeedResponsePage(response, klass)));
            }, this.resetSessionTokenRetryPolicy.getRequestPolicy());
        };

        return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize);
    }

    /**
     * Pages through a feed of top-level resources (no collection/partition-key resolution),
     * threading continuation token and page size through each request's headers.
     */
    private <T extends Resource> Flux<FeedResponse<T>> readFeed(FeedOptions options, ResourceType resourceType,
                                                                Class<T> klass, String resourceLink) {
        if (options == null) {
            options = new FeedOptions();
        }

        int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1;
        final FeedOptions finalFeedOptions = options;
        BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> {
            Map<String, String> requestHeaders = new HashMap<>();
            if (continuationToken != null) {
                requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken);
            }
            requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize));
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed,
                resourceType, resourceLink, requestHeaders, finalFeedOptions);
            return request;
        };

        Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> {
            return ObservableHelper.inlineIfPossibleAsObs(
                () -> readFeed(request).map(response -> toFeedResponsePage(response, klass)),
                this.resetSessionTokenRetryPolicy.getRequestPolicy());
        };

        return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize);
    }

    @Override
    public Flux<FeedResponse<Offer>> queryOffers(String query, FeedOptions options) {
        return queryOffers(new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, FeedOptions options) {
        // Offers are account-scoped; no parent resource link is needed.
        return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer);
    }
    // Reads the database-account metadata through the standard retry pipeline.
    @Override
    public Mono<DatabaseAccount> getDatabaseAccount() {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> getDatabaseAccountInternal(documentClientRetryPolicy),
            documentClientRetryPolicy);
    }

    private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            logger.debug("Getting Database Account");
            // Empty path reads the account root; cast disambiguates the header-map overload.
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null);
            return this.read(request, documentClientRetryPolicy).map(response -> toDatabaseAccount(response));

        } catch (Exception e) {
            logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    public Object getSession() {
        return this.sessionContainer;
    }

    public void setSession(Object sessionContainer) {
        this.sessionContainer = (SessionContainer) sessionContainer;
    }

    public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
        return partitionKeyRangeCache;
    }

    // Reads the database account directly from a specific endpoint (bypassing endpoint selection),
    // and refreshes the multi-write-locations flag from the response.
    public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) {
        return Flux.defer(() -> {
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DatabaseAccount, "", null, (Object) null);
            this.populateHeaders(request, RequestVerb.GET);

            request.setEndpointOverride(endpoint);
            return this.gatewayProxy.processMessage(request).doOnError(e -> {
                String message = String.format("Failed to retrieve database account information. %s",
                    e.getCause() != null ? e.getCause().toString() : e.toString());
                logger.warn(message);
            }).map(rsp -> rsp.getResource(DatabaseAccount.class))
                .doOnNext(databaseAccount -> {
                    // Multi-write is enabled only if both the client policy and the account allow it.
                    this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations()
                        && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount);
                });
        });
    }

    /**
     * Certain requests must be routed through gateway even when the client connectivity mode is direct.
     *
     * @param request the service request about to be dispatched
     * @return the gateway store model for metadata-style operations, otherwise the direct store model
     */
    private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) {
        // If a request is configured to always use GATEWAY mode then we route it through gateway.
        if (request.UseGatewayMode) {
            return this.gatewayProxy;
        }

        ResourceType resourceType = request.getResourceType();
        OperationType operationType = request.getOperationType();

        // Precedence note: && binds tighter than ||, so this reads as
        // Offer  OR  (script resource AND not ExecuteJavaScript)  OR  PartitionKeyRange.
        if (resourceType == ResourceType.Offer ||
            resourceType.isScript() && operationType != OperationType.ExecuteJavaScript ||
            resourceType == ResourceType.PartitionKeyRange) {
            return this.gatewayProxy;
        }

        if (operationType == OperationType.Create
            || operationType == OperationType.Upsert) {
            if (resourceType == ResourceType.Database ||
                resourceType == ResourceType.User ||
                resourceType == ResourceType.DocumentCollection ||
                resourceType == ResourceType.Permission) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else if (operationType == OperationType.Delete) {
            if (resourceType == ResourceType.Database ||
                resourceType == ResourceType.User ||
                resourceType == ResourceType.DocumentCollection) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else if (operationType == OperationType.Replace) {
            if (resourceType == ResourceType.DocumentCollection) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else if (operationType == OperationType.Read) {
            if (resourceType == ResourceType.DocumentCollection) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else {
            // Queries on collection children without an explicit partition key range id
            // need gateway routing; everything else goes direct.
            if ((request.getOperationType() == OperationType.Query || request.getOperationType() == OperationType.SqlQuery) &&
                Utils.isCollectionChild(request.getResourceType())) {
                if (request.getPartitionKeyRangeIdentity() == null) {
                    return this.gatewayProxy;
                }
            }

            return this.storeModel;
        }
    }

    // Shuts down, in order: global endpoint manager, store client factory, then the HTTP client.
    // closeQuietly / the catch block ensure one failure does not abort the remaining steps.
    @Override
    public void close() {
        logger.info("Shutting down ...");
        logger.info("Closing Global Endpoint Manager ...");
        LifeCycleUtils.closeQuietly(this.globalEndpointManager);
        logger.info("Closing StoreClientFactory ...");
        LifeCycleUtils.closeQuietly(this.storeClientFactory);
        logger.info("Shutting down reactorHttpClient ...");
        try {
            this.reactorHttpClient.shutdown();
        } catch (Exception e) {
            logger.warn("shutting down reactorHttpClient failed", e);
        }
        logger.info("Shutting down completed.");
    }
}
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider { private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean connectionSharingAcrossClientsEnabled; private CosmosKeyCredential cosmosKeyCredential; private CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final HttpClient reactorHttpClient; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> 
partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false); if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token", pathInfo.resourceIdOrFullName, partitionKey != null ? 
partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled) { logger.info( "Initializing DocumentClient with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.cosmosKeyCredential = cosmosKeyCredential; if (this.cosmosKeyCredential != null) { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)){ this.cosmosKeyCredential = new CosmosKeyCredential(this.masterKeyOrResourceToken); hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = 
new ConnectionPolicy(); } boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; }
// ^ tail of the RxDocumentClientImpl constructor (its start is above this chunk): session capture is
//   disabled unless consistency is SESSION or the override flag forces it; builds user agent, shared
//   HTTP client, global endpoint manager and the default retry policy.

// Reads the latest cached database account and decides whether multi-region writes may be used.
// NOTE(review): asserts the account was already fetched — confirm globalEndpointManager.init() runs first (see init()).
private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); assert(databaseAccount != null); this.useMultipleWriteLocations = this.connectionPolicy.isUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); }

// Second-phase initialization: gateway proxy, endpoint manager, collection/partition-key caches,
// and the store model (gateway proxy in GATEWAY mode, direct connectivity otherwise).
public void init() { this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); this.collectionCache = new RxClientCollectionCache(this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } }

// Builds the TCP store-client factory and global address resolver used for direct (non-gateway) connectivity.
private void initializeDirectConnectivity() { this.storeClientFactory = new StoreClientFactory( this.configs, this.connectionPolicy.getRequestTimeout(), 0, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled ); this.addressResolver = new GlobalAddressResolver( this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy); this.createStoreModel(true); }

// Adapter exposing this client to GlobalEndpointManager as a DatabaseAccountManagerInternal.
DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; }

// Factory for the gateway store model; package-visible so tests can override it.
RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { return new RxGatewayStoreModel(sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient); }

// Creates the reactor HTTP client, or reuses a shared instance when sharing across clients is enabled.
private HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeout(this.connectionPolicy.getIdleConnectionTimeout()) .withPoolSize(this.connectionPolicy.getMaxPoolSize()) .withHttpProxy(this.connectionPolicy.getProxy()) .withRequestTimeout(this.connectionPolicy.getRequestTimeout()); if (connectionSharingAcrossClientsEnabled) { return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig); } else { return HttpClient.createFixed(httpClientConfig); } }

// Wraps a StoreClient in a ServerStoreModel.
// NOTE(review): the subscribeRntbdStatus parameter is never read; a literal false is passed to createStoreClient.
private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient( this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, false ); this.storeModel = new ServerStoreModel(storeClient); }

// Simple accessors for the configured endpoints and connection policy.
@Override public URI getServiceEndpoint() { return this.serviceEndpoint; }
@Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); }
@Override public URI getReadEndpoint() { return globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); }
@Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; }

// Creates a database; retries are driven by the session-token-reset retry policy.
@Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); }

// Validates + serializes the database, records serialization diagnostics, and issues the Create request.
private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options); ZonedDateTime serializationStartTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(database); ZonedDateTime serializationEndTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.DATABASE_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } }

// Deletes a database by link.
@Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); }
private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } }

// Reads a database by link. (readDatabaseInternal continues on the next source line.)
@Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); }
private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. 
databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } }
// ^ remainder of readDatabaseInternal (its signature is on the previous source line).

// Lists all databases as a paged feed.
@Override public Flux<FeedResponse<Database>> readDatabases(FeedOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); }

// Maps a parent resource link + resource type to the REST path used for queries.
// NOTE(review): parameter name "parentResouceLink" is a pre-existing typo (missing 'r'); left as-is.
private String parentResourceLinkToQueryLink(String parentResouceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResouceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResouceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResouceLink, Paths.USERS_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResouceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResouceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResouceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResouceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResouceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } }

// Generic query entry point: builds a query execution context for the resource type and streams its pages.
private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, FeedOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String queryResourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this); Flux<? extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createDocumentQueryExecutionContextAsync(queryClient, resourceTypeEnum, klass, sqlQuery , options, queryResourceLink, false, activityId); return executionContext.flatMap(IDocumentQueryExecutionContext<T>::executeAsync); }

// Database query overloads (raw query string delegates to the SqlQuerySpec form).
@Override public Flux<FeedResponse<Database>> queryDatabases(String query, FeedOptions options) { return queryDatabases(new SqlQuerySpec(query), options); }
@Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); }

// Creates a collection under the given database.
@Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); }

// Serializes the collection, issues the Create, and records the returned session token on success.
private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); ZonedDateTime serializationStartTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); ZonedDateTime serializationEndTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } }

// Replaces a collection definition; refreshes the session token when the response carries a resource.
@Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); }
private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); ZonedDateTime serializationStartTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); ZonedDateTime serializationEndTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } }

// Deletes a collection by link.
@Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); }
private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } }

// Low-level verb helpers: stamp headers, record retry context when retries have occurred, and dispatch
// to the store proxy. readFeed always goes through the gateway; query additionally captures the session token.
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.DELETE); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); }
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.GET); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); }
Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.GET); return gatewayProxy.processMessage(request); }
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.POST); return this.getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); }

// Reads a collection by link.
@Override public Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); }
private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } }

// Lists and queries collections under a database. (The SqlQuerySpec overload's body continues on the next source line.)
@Override public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT)); }
@Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, FeedOptions options) { return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection); }
@Override public Flux<FeedResponse<DocumentCollection>>
// (continuation of the queryCollections overload whose signature is on the previous line)
queryCollections(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection);
}

/**
 * Serializes stored-procedure arguments into a single JSON array literal, e.g. {@code [1,"a"]}.
 * {@code JsonSerializable} values use their own serializer; everything else goes through the shared
 * Jackson {@code mapper}.
 *
 * @throws IllegalArgumentException if an argument cannot be serialized to JSON (cause preserved).
 */
private static String serializeProcedureParams(Object[] objectArray) {
    String[] stringArray = new String[objectArray.length];
    for (int i = 0; i < objectArray.length; ++i) {
        Object object = objectArray[i];
        if (object instanceof JsonSerializable) {
            stringArray[i] = ModelBridgeInternal.toJsonFromJsonSerializable((JsonSerializable) object);
        } else {
            try {
                stringArray[i] = mapper.writeValueAsString(object);
            } catch (IOException e) {
                throw new IllegalArgumentException("Can't serialize the object into the json string", e);
            }
        }
    }
    return String.format("[%s]", StringUtils.join(stringArray, ","));
}

/**
 * Rejects resource ids containing characters that are illegal in Cosmos DB resource names
 * ('/', '\\', '?', '#') and ids that end with a space.
 *
 * FIX: the '#' comparison had been corrupted to {@code indexOf(' throw ...} (not valid Java —
 * the "#') != -1) {" text was lost, presumably by a tool treating '#' as a comment marker);
 * restored the intended {@code resource.getId().indexOf('#') != -1} check.
 *
 * @throws IllegalArgumentException if the id contains an illegal character or ends with a space.
 */
private static void validateResource(Resource resource) {
    if (!StringUtils.isEmpty(resource.getId())) {
        if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1
                || resource.getId().indexOf('?') != -1 || resource.getId().indexOf('#') != -1) {
            throw new IllegalArgumentException("Id contains illegal chars.");
        }
        if (resource.getId().endsWith(" ")) {
            throw new IllegalArgumentException("Id ends with a space.");
        }
    }
}

/**
 * Builds per-request headers from client-level defaults (tentative writes, consistency level)
 * plus whatever the supplied {@link RequestOptions} carries. Returns early with only the
 * client defaults when {@code options} is null.
 * (This method's body continues on the next source line.)
 */
private Map<String, String> getRequestHeaders(RequestOptions options) {
    Map<String, String> headers = new HashMap<>();
    if (this.useMultipleWriteLocations) {
        headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString());
    }
    if (consistencyLevel != null) {
        headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString());
    }
    if (options == null) {
        return headers;
    }
    Map<String, String> customOptions = options.getHeaders();
    if (customOptions != null) {
        headers.putAll(customOptions);
    }
    if (options.getAccessCondition() != null) {
        if (options.getAccessCondition().getType() == AccessConditionType.IF_MATCH) {
            headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getAccessCondition().getCondition());
        } else {
headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, options.getAccessCondition().getCondition()); } } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.isPopulateQuotaInfo()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } return headers; }
// ^ remainder of getRequestHeaders: options-driven headers (consistency, indexing directive, triggers,
//   session token, token expiry, offer throughput/type, quota info, script logging).

// Resolves the target collection from the cache, then delegates to the synchronous overload below.
private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); }
// Same as above, but the caller supplies the (already started) collection resolution.
private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); }

// Determines the effective partition key (explicit option > NONE > empty definition > extracted from
// the document body), records extraction timing diagnostics, and stamps it on the request headers.
// Throws UnsupportedOperationException when no partition key can be determined.
private void addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsByteBuffer != null) { CosmosItemProperties cosmosItemProperties; if (objectDoc instanceof CosmosItemProperties) { cosmosItemProperties = (CosmosItemProperties) objectDoc; } else { contentAsByteBuffer.rewind(); cosmosItemProperties = new CosmosItemProperties(contentAsByteBuffer); } ZonedDateTime serializationStartTime = ZonedDateTime.now(ZoneOffset.UTC); partitionKeyInternal = extractPartitionKeyValueFromDocument(cosmosItemProperties, partitionKeyDefinition); ZonedDateTime serializationEndTime = ZonedDateTime.now(ZoneOffset.UTC); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTime, serializationEndTime, SerializationDiagnosticsContext.SerializationType.PARTITION_KEY_FETCH_SERIALIZATION ); SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); }

// Extracts the partition key value at the definition's (single) path from the document;
// a missing value or an ObjectNode maps to the NONE partition key. Returns null when no definition.
// NOTE(review): only the first path of the definition is consulted.
private static PartitionKeyInternal extractPartitionKeyValueFromDocument( CosmosItemProperties document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, parts); if (value == null || value.getClass() == ObjectNode.class) { value = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } } return null; }

// Serializes a document payload, builds the Create/Upsert request with serialization diagnostics,
// and resolves + attaches the partition key. NOTE(review): disableAutomaticIdGeneration is not read here.
private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } ZonedDateTime serializationStartTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); ByteBuffer content = BridgeInternal.serializeJsonToByteBuffer(document, mapper); ZonedDateTime serializationEndTimeUTC = ZonedDateTime.now(ZoneOffset.UTC); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics), request); return addPartitionKeyInformation(request, content, document, options, collectionObs); }

// Stamps the x-date and (when credentials are configured) the authorization header on every request.
// (This method's body continues on the next source line.)
private void populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null ||
this.cosmosAuthorizationTokenResolver != null || this.cosmosKeyCredential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.cosmosAuthorizationTokenResolver != null) { return this.cosmosAuthorizationTokenResolver.getAuthorizationToken(requestVerb, resourceName, this.resolveCosmosResourceType(resourceType), properties != null ? 
Collections.unmodifiableMap(properties) : null); } else if (cosmosKeyCredential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { CosmosResourceType cosmosResourceType = ModelBridgeInternal.fromServiceSerializedFormat(resourceType.toString()); if (cosmosResourceType == null) { return CosmosResourceType.SYSTEM; } return cosmosResourceType; } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy retryPolicy) { populateHeaders(request, RequestVerb.POST); RxStoreModel storeProxy = this.getStoreProxy(request); if(request.requestContext != null && retryPolicy.getRetryCount() > 0) { retryPolicy.updateEndTime(); request.requestContext.updateRetryContext(retryPolicy, true); } return storeProxy.processMessage(request); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.POST); Map<String, String> headers = request.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); 
            // --- continuation of upsert(...): refresh retry diagnostics, then capture the session token. ---
            request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
        }
        return getStoreProxy(request).processMessage(request)
            .map(response -> {
                this.captureSessionToken(request, response);
                return response;
            });
    }

    /**
     * Creates a document in the given collection.
     * When no partition key is supplied, wraps the retry policy with a
     * PartitionKeyMismatchRetryPolicy so stale collection-cache entries are retried.
     */
    @Override
    public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document,
                                                           RequestOptions options,
                                                           boolean disableAutomaticIdGeneration) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        if (options == null || options.getPartitionKey() == null) {
            requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy,
                collectionLink, options);
        }
        DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy;
        return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document,
            options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy);
    }

    /** Builds the create-document request and sends it; serialization errors surface as Mono.error. */
    private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document,
                                                                    RequestOptions options,
                                                                    boolean disableAutomaticIdGeneration,
                                                                    DocumentClientRetryPolicy requestRetryPolicy) {
        try {
            logger.debug("Creating a Document. collectionLink: [{}]", collectionLink);

            Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(requestRetryPolicy,
                collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create);

            Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> {
                return create(request, requestRetryPolicy);
            });

            return responseObservable
                .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Upserts a document; mirrors createDocument but routes through OperationType.Upsert. */
    @Override
    public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document,
                                                            RequestOptions options,
                                                            boolean disableAutomaticIdGeneration) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        if (options == null || options.getPartitionKey() == null) {
            requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy,
                collectionLink, options);
        }
        DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy;
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document,
            options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance);
    }

    /** Builds the upsert-document request and sends it. */
    private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document,
                                                                    RequestOptions options,
                                                                    boolean disableAutomaticIdGeneration,
                                                                    DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a Document. collectionLink: [{}]", collectionLink);

            Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(retryPolicyInstance,
                collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert);

            Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> {
                return upsert(request, retryPolicyInstance);
            });

            return responseObservable
                .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Replaces the document at the given link with an arbitrary POJO payload. */
    @Override
    public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document,
                                                             RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        if (options == null || options.getPartitionKey() == null) {
            String collectionLink = Utils.getCollectionName(documentLink);
            requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy,
                collectionLink, options);
        }
        DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy;
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document,
            options, finalRequestRetryPolicy), requestRetryPolicy);
    }

    /** Validates the link/payload, converts the POJO to a Document, and delegates to the typed overload. */
    private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document,
                                                                     RequestOptions options,
                                                                     DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(documentLink)) {
                throw new IllegalArgumentException("documentLink");
            }
            if (document == null) {
                throw new IllegalArgumentException("document");
            }
            Document typedDocument = documentFromObject(document, mapper);
            return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance);

        } catch (Exception e) {
            logger.debug("Failure in replacing a document due to [{}]", e.getMessage());
            return Mono.error(e);
        }
    }

    /** Replaces a document using its self-link as the target. */
    @Override
    public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        if (options == null || options.getPartitionKey() == null) {
            String collectionLink = document.getSelfLink();
            requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy,
                collectionLink, options);
        }
        DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy;
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options,
            finalRequestRetryPolicy), requestRetryPolicy);
    }

    private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options,
                                                                     DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (document == null) {
                throw new IllegalArgumentException("document");
            }
            return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance);

        } catch (Exception e) {
            // NOTE(review): the message says "database" but this method replaces a document — likely
            // a copy-paste slip worth fixing upstream; left untouched here.
            logger.debug("Failure in replacing a database due to [{}]", e.getMessage());
            return Mono.error(e);
        }
    }

    /**
     * Core replace: serializes the document (recording serialization diagnostics), resolves the
     * collection for partition-key enrichment, then issues the Replace request.
     */
    private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document,
                                                                     RequestOptions options,
                                                                     DocumentClientRetryPolicy retryPolicyInstance) {

        if (document == null) {
            throw new IllegalArgumentException("document");
        }

        logger.debug("Replacing a Document. documentLink: [{}]", documentLink);
        final String path = Utils.joinPath(documentLink, null);
        final Map<String, String> requestHeaders = getRequestHeaders(options);

        // Time the payload serialization so it shows up in the request diagnostics.
        ZonedDateTime serializationStartTimeUTC = ZonedDateTime.now(ZoneOffset.UTC);
        ByteBuffer content = serializeJsonToByteBuffer(document);
        ZonedDateTime serializationEndTime = ZonedDateTime.now(ZoneOffset.UTC);
        SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics =
            new SerializationDiagnosticsContext.SerializationDiagnostics(
                serializationStartTimeUTC,
                serializationEndTime,
                SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION);

        final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
            ResourceType.Document, path, requestHeaders, options, content);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        SerializationDiagnosticsContext serializationDiagnosticsContext =
            BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosResponseDiagnostics);
        if (serializationDiagnosticsContext != null) {
            serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics);
        }

        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
            collectionCache.resolveCollectionAsync(
                BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics),
                request);
        Mono<RxDocumentServiceRequest> requestObs =
            addPartitionKeyInformation(request, content, document, options, collectionObs);

        return requestObs.flatMap(req -> {
            // NOTE(review): sends the outer `request` rather than the lambda's `req` — presumably the
            // same (mutated) object, as elsewhere in this class; confirm before changing.
            return replace(request, retryPolicyInstance)
                .map(resp -> toResourceResponse(resp, Document.class));
        });
    }

    /**
     * Deletes the document at the given link.
     * NOTE: the trailing argument of inlineIfPossibleAsObs continues past this chunk boundary.
     */
    @Override
    public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> deleteDocumentInternal(documentLink, options, requestRetryPolicy),
            // --- continuation of deleteDocument(...): closing argument of inlineIfPossibleAsObs. ---
            requestRetryPolicy);
    }

    /** Builds and sends the Delete request for a document, enriching it with partition-key info first. */
    private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, RequestOptions options,
                                                                    DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(documentLink)) {
                throw new IllegalArgumentException("documentLink");
            }

            logger.debug("Deleting a Document. documentLink: [{}]", documentLink);
            String path = Utils.joinPath(documentLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.Document, path, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
                collectionCache.resolveCollectionAsync(
                    BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics),
                    request);
            Mono<RxDocumentServiceRequest> requestObs =
                addPartitionKeyInformation(request, null, null, options, collectionObs);

            return requestObs.flatMap(req -> {
                return this.delete(req, retryPolicyInstance)
                    .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));
            });

        } catch (Exception e) {
            logger.debug("Failure in deleting a document due to [{}]", e.getMessage());
            return Mono.error(e);
        }
    }

    /** Reads a single document by link. */
    @Override
    public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    /** Builds and sends the Read request for a document. */
    private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options,
                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(documentLink)) {
                throw new IllegalArgumentException("documentLink");
            }

            logger.debug("Reading a Document. documentLink: [{}]", documentLink);
            String path = Utils.joinPath(documentLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Document, path, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
                this.collectionCache.resolveCollectionAsync(
                    BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics),
                    request);
            Mono<RxDocumentServiceRequest> requestObs =
                addPartitionKeyInformation(request, null, null, options, collectionObs);

            return requestObs.flatMap(req -> {
                // NOTE(review): onBeforeSendRequest is invoked a second time here (already called
                // above); presumably harmless but looks like duplicated wiring — confirm.
                if (retryPolicyInstance != null) {
                    retryPolicyInstance.onBeforeSendRequest(request);
                }
                return this.read(request, retryPolicyInstance)
                    .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));
            });

        } catch (Exception e) {
            logger.debug("Failure in reading a document due to [{}]", e.getMessage());
            return Mono.error(e);
        }
    }

    /** Reads all documents in a collection, implemented as a SELECT * query. */
    @Override
    public Flux<FeedResponse<Document>> readDocuments(String collectionLink, FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return queryDocuments(collectionLink, "SELECT * FROM r", options);
    }

    /**
     * Point-read-many: groups the requested (id, partition key) pairs by their owning partition-key
     * range (via the routing map), issues one query per range, and merges the pages into a single
     * FeedResponse, summing the request charge.
     */
    @Override
    public <T> Mono<FeedResponse<T>> readMany(
        List<Pair<String, PartitionKey>> itemKeyList,
        String collectionLink,
        FeedOptions options,
        Class<T> klass) {
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(
            OperationType.Query,
            ResourceType.Document,
            collectionLink, null
        );
        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
            collectionCache.resolveCollectionAsync(null, request);
        return collectionObs
            .flatMap(documentCollectionResourceResponse -> {
                final DocumentCollection collection = documentCollectionResourceResponse.v;
                if (collection == null) {
                    throw new IllegalStateException("Collection cannot be null");
                }

                Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache
                    .tryLookupAsync(
                        BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosResponseDiagnostics),
                        collection.getResourceId(), null, null);

                return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> {
                    Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap =
                        new HashMap<>();
                    CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v;
                    if (routingMap == null) {
                        throw new IllegalStateException("Failed to get routing map.");
                    }
                    // Bucket each (id, pk) pair under the partition-key range that owns its EPK.
                    itemKeyList
                        .forEach(stringPartitionKeyPair -> {

                            String effectivePartitionKeyString = PartitionKeyInternalHelper
                                .getEffectivePartitionKeyString(BridgeInternal
                                        .getPartitionKeyInternal(stringPartitionKeyPair
                                            .getRight()),
                                    collection
                                        .getPartitionKey());

                            PartitionKeyRange range =
                                routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString);

                            if (partitionRangeItemKeyMap.get(range) == null) {
                                List<Pair<String, PartitionKey>> list = new ArrayList<>();
                                list.add(stringPartitionKeyPair);
                                partitionRangeItemKeyMap.put(range, list);
                            } else {
                                List<Pair<String, PartitionKey>> pairs =
                                    partitionRangeItemKeyMap.get(range);
                                pairs.add(stringPartitionKeyPair);
                                partitionRangeItemKeyMap.put(range, pairs);
                            }

                        });

                    Set<PartitionKeyRange> partitionKeyRanges = partitionRangeItemKeyMap.keySet();
                    List<PartitionKeyRange> ranges = new ArrayList<>();
                    ranges.addAll(partitionKeyRanges);

                    // Per-range SQL specs matching the bucketed id/pk pairs.
                    Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap;
                    rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey());

                    // Placeholder query text: the per-range specs above are what actually execute.
                    String sqlQuery = "this is dummy and only used in creating " +
                        "ParallelDocumentQueryExecutioncontext, but not used";

                    return createReadManyQuery(collectionLink, new SqlQuerySpec(sqlQuery), options,
                        Document.class, ResourceType.Document, collection,
                        Collections.unmodifiableMap(rangeQueryMap))
                        .collectList()
                        .map(feedList -> {
                            // Merge all pages: concatenate results and total the request charge.
                            List<T> finalList = new ArrayList<T>();
                            HashMap<String, String> headers = new HashMap<>();
                            double requestCharge = 0;
                            for (FeedResponse<Document> page : feedList) {
                                requestCharge += page.getRequestCharge();
                                finalList.addAll(page.getResults().stream()
                                    .map(document ->
                                        ModelBridgeInternal.toObjectFromJsonSerializable(document, klass))
                                    .collect(Collectors.toList()));
                            }
                            headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE,
                                Double.toString(requestCharge));
                            FeedResponse<T> frp = BridgeInternal.createFeedResponse(finalList, headers);
                            return frp;
                        });
                });
            }
        );
    }

    /**
     * Builds one SqlQuerySpec per partition-key range. When the partition key path is /id the
     * simpler IN-list form is used; otherwise each pair is matched on both id and pk.
     */
    private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap(
        Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap,
        PartitionKeyDefinition partitionKeyDefinition) {
        Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>();
        String partitionKeySelector = createPkSelector(partitionKeyDefinition);

        for (Map.Entry<PartitionKeyRange, List<Pair<String, PartitionKey>>> entry :
            partitionRangeItemKeyMap.entrySet()) {

            SqlQuerySpec sqlQuerySpec;
            if (partitionKeySelector.equals("[\"id\"]")) {
                sqlQuerySpec =
                    createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector);
            } else {
                sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector);
            }
            // Add query for this partition.
            rangeQueryMap.put(entry.getKey(), sqlQuerySpec);
        }

        return rangeQueryMap;
    }

    /**
     * Query form for collections whose partition key IS the id:
     * SELECT * FROM c WHERE c.id IN (@param0, ...). Pairs whose id and pk value disagree are skipped.
     */
    private SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame(
        List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) {
        StringBuilder queryStringBuilder = new StringBuilder();
        List<SqlParameter> parameters = new ArrayList<>();

        queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( ");
        for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
            Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);

            String idValue = pair.getLeft();
            String idParamName = "@param" + i;

            PartitionKey pkValueAsPartitionKey = pair.getRight();
            Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);

            if (!Objects.equals(idValue, pkValue)) {
                // this is sanity check to ensure id and pk are the same
                continue;
            }

            parameters.add(new SqlParameter(idParamName, idValue));
            queryStringBuilder.append(idParamName);

            if (i < idPartitionKeyPairList.size() - 1) {
                queryStringBuilder.append(", ");
            }
        }
        queryStringBuilder.append(" )");

        return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
    }

    /**
     * General query form: SELECT * FROM c WHERE ((c.id = @p1 AND c[pk] = @p0) OR ...),
     * two parameters per pair (even index = pk, odd index = id).
     */
    private SqlQuerySpec createReadManyQuerySpec(List<Pair<String, PartitionKey>> idPartitionKeyPairList,
                                                 String partitionKeySelector) {
        StringBuilder queryStringBuilder = new StringBuilder();
        List<SqlParameter> parameters = new ArrayList<>();

        queryStringBuilder.append("SELECT * FROM c WHERE ( ");
        for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
            Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);

            PartitionKey pkValueAsPartitionKey = pair.getRight();
            Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);
            String pkParamName = "@param" + (2 * i);
            parameters.add(new SqlParameter(pkParamName, pkValue));

            String idValue = pair.getLeft();
            String idParamName = "@param" + (2 * i + 1);
            parameters.add(new SqlParameter(idParamName, idValue));

            queryStringBuilder.append("(");
            queryStringBuilder.append("c.id = ");
            queryStringBuilder.append(idParamName);
            queryStringBuilder.append(" AND ");
            queryStringBuilder.append(" c");
            queryStringBuilder.append(partitionKeySelector);
            queryStringBuilder.append((" = "));
            queryStringBuilder.append(pkParamName);
            queryStringBuilder.append(" )");

            if (i < idPartitionKeyPairList.size() - 1) {
                queryStringBuilder.append(" OR ");
            }
        }
        queryStringBuilder.append(" )");

        return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
    }

    /**
     * Turns the partition-key definition's paths into a bracketed selector, e.g. /pk -> ["pk"].
     * NOTE(review): quotes inside a path segment are replaced by a bare backslash — looks like an
     * incomplete escape (expected \" ?); confirm against the service-side path grammar.
     */
    private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) {
        return partitionKeyDefinition.getPaths()
            .stream()
            .map(pathPart -> StringUtils.substring(pathPart, 1))           // skip the leading '/'
            .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\"))    // escape quote characters
            .map(part -> "[\"" + part + "\"]")
            .collect(Collectors.joining());
    }

    /** Creates the read-many parallel query execution context and runs it across ranges. */
    private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery(
        String parentResourceLink,
        SqlQuerySpec sqlQuery,
        FeedOptions options,
        Class<T> klass,
        ResourceType resourceTypeEnum,
        DocumentCollection collection,
        Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) {
        UUID activityId = Utils.randomUUID();
        IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this);
        Flux<? extends IDocumentQueryExecutionContext<T>> executionContext =
            DocumentQueryExecutionContextFactory.createReadManyQueryAsync(queryClient,
                collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(),
                parentResourceLink, activityId, klass, resourceTypeEnum);
        return executionContext.flatMap(IDocumentQueryExecutionContext<T>::executeAsync);
    }

    /** Queries documents with a raw SQL string. */
    @Override
    public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query,
                                                        FeedOptions options) {
        return queryDocuments(collectionLink, new SqlQuerySpec(query), options);
    }

    /**
     * Adapter exposing this client's caches, retry factory, consistency levels, and query
     * execution to the document-query machinery. readFeedAsync is intentionally unimplemented
     * (returns null) — presumably unused on this path; confirm before relying on it.
     */
    private IDocumentQueryClient documentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl) {

        return new IDocumentQueryClient () {

            @Override
            public RxCollectionCache getCollectionCache() {
                return RxDocumentClientImpl.this.collectionCache;
            }

            @Override
            public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
                return RxDocumentClientImpl.this.partitionKeyRangeCache;
            }

            @Override
            public IRetryPolicyFactory getResetSessionTokenRetryPolicy() {
                return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy;
            }

            @Override
            public ConsistencyLevel getDefaultConsistencyLevelAsync() {
                return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel();
            }

            @Override
            public ConsistencyLevel getDesiredConsistencyLevelAsync() {
                return RxDocumentClientImpl.this.consistencyLevel;
            }

            @Override
            public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) {
                return RxDocumentClientImpl.this.query(request).single();
            }

            @Override
            public QueryCompatibilityMode getQueryCompatibilityMode() {
                return QueryCompatibilityMode.Default;
            }

            @Override
            public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) {
                return null;
            }
        };
    }

    /** Queries documents with a parameterized SqlQuerySpec. */
    @Override
    public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec,
                                                        FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document);
    }

    /** Executes a change-feed query over the collection. */
    @Override
    public Flux<FeedResponse<Document>> queryDocumentChangeFeed(final String collectionLink,
                                                                 final ChangeFeedOptions changeFeedOptions) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<Document>(this,
            ResourceType.Document, Document.class, collectionLink, changeFeedOptions);
        return changeFeedQueryImpl.executeAsync();
    }

    /** Reads the partition-key-range feed of a collection. */
    @Override
    public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink,
                                                                         FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class,
            Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT));
    }

    /** Validates inputs and builds a stored-procedure request for the given operation type. */
    private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink,
                                                               StoredProcedure storedProcedure,
                                                               RequestOptions options,
                                                               OperationType operationType) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        if (storedProcedure == null) {
            throw new IllegalArgumentException("storedProcedure");
        }

        validateResource(storedProcedure);

        String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
            ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options);

        return request;
    }

    /** Validates inputs and builds a user-defined-function request for the given operation type. */
    private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink,
                                                                   UserDefinedFunction udf,
                                                                   RequestOptions options,
                                                                   OperationType operationType) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        if (udf == null) {
            throw new IllegalArgumentException("udf");
        }

        validateResource(udf);

        String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
            ResourceType.UserDefinedFunction, path, udf, requestHeaders, options);

        return request;
    }

    /** Creates a stored procedure in the given collection. */
    @Override
    public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink,
                                                                          StoredProcedure storedProcedure,
                                                                          RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy),
            requestRetryPolicy);
    }

    private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink,
                                                                                  StoredProcedure storedProcedure,
                                                                                  RequestOptions options,
                                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]",
                collectionLink, storedProcedure.getId());
            RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure,
                options, OperationType.Create);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.create(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Upserts a stored procedure in the given collection. */
    @Override
    public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink,
                                                                          StoredProcedure storedProcedure,
                                                                          RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy),
            requestRetryPolicy);
    }

    private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink,
                                                                                  StoredProcedure storedProcedure,
                                                                                  RequestOptions options,
                                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]",
                collectionLink, storedProcedure.getId());
            RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure,
                options, OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Replaces an existing stored procedure (addressed by its self-link). */
    @Override
    public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure,
                                                                           RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy),
            requestRetryPolicy);
    }

    private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure,
                                                                                   RequestOptions options,
                                                                                   DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            if (storedProcedure == null) {
                throw new IllegalArgumentException("storedProcedure");
            }
            logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId());

            RxDocumentClientImpl.validateResource(storedProcedure);

            String path = Utils.joinPath(storedProcedure.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes the stored procedure at the given link. */
    @Override
    public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink,
                                                                          RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy),
            requestRetryPolicy);
    }

    private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink,
                                                                                  RequestOptions options,
                                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            if (StringUtils.isEmpty(storedProcedureLink)) {
                throw new IllegalArgumentException("storedProcedureLink");
            }

            logger.debug("Deleting a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.StoredProcedure, path, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the stored procedure at the given link. */
    @Override
    public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink,
                                                                        RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink,
                                                                                RequestOptions options,
                                                                                DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            if (StringUtils.isEmpty(storedProcedureLink)) {
                throw new IllegalArgumentException("storedProcedureLink");
            }

            logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.StoredProcedure, path, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, StoredProcedure.class));

        } catch (Exception e) {
            logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the stored-procedure feed of a collection. */
    @Override
    public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink,
                                                                     FeedOptions options) {

        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class,
            Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT));
    }

    /** Queries stored procedures with a raw SQL string. */
    @Override
    public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query,
                                                                      FeedOptions options) {
        return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options);
    }

    /** Queries stored procedures with a parameterized SqlQuerySpec. */
    @Override
    public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink,
                                                                      SqlQuerySpec querySpec,
                                                                      FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, StoredProcedure.class,
            ResourceType.StoredProcedure);
    }

    /** Executes a stored procedure without request options. */
    @Override
    public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink,
                                                                 Object[] procedureParams) {
        return this.executeStoredProcedure(storedProcedureLink, null, procedureParams);
    }

    /**
     * Executes a stored procedure with the given options and parameters.
     * NOTE: the return expression continues past this chunk boundary.
     */
    @Override
    public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink,
                                                                 RequestOptions options,
                                                                 Object[] procedureParams) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return
            // --- continuation of executeStoredProcedure(...): the returned expression. ---
            ObservableHelper.inlineIfPossibleAsObs(
                () -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams,
                    documentClientRetryPolicy),
                documentClientRetryPolicy);
    }

    /**
     * Builds and sends the ExecuteJavaScript request: serializes the parameters into the body,
     * forces a JSON Accept header, enriches with partition-key info, then POSTs and captures the
     * session token from the response.
     */
    private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink,
                                                                         RequestOptions options,
                                                                         Object[] procedureParams,
                                                                         DocumentClientRetryPolicy retryPolicy) {

        try {
            logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);

            Map<String, String> requestHeaders = getRequestHeaders(options);
            requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ExecuteJavaScript,
                ResourceType.StoredProcedure, path,
                procedureParams != null ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "",
                requestHeaders, options);

            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);
            // NOTE(review): sends the outer `request` rather than the lambda's `req` — presumably the
            // same (mutated) object, as elsewhere in this class; confirm before changing.
            return reqObs.flatMap(req -> create(request, retryPolicy)
                .map(response -> {
                    this.captureSessionToken(request, response);
                    return toStoredProcedureResponse(response);
                }));

        } catch (Exception e) {
            logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Creates a trigger in the given collection. */
    @Override
    public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger,
                                                          RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Trigger>> createTriggerInternal(String collectionLink, Trigger trigger,
                                                                  RequestOptions options,
                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]",
                collectionLink, trigger.getId());
            RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options,
                OperationType.Create);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.create(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Upserts a trigger in the given collection. */
    @Override
    public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger,
                                                          RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger,
                                                                  RequestOptions options,
                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {

            logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]",
                collectionLink, trigger.getId());
            RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options,
                OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.upsert(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Validates inputs and builds a trigger request for the given operation type. */
    private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger,
                                                       RequestOptions options, OperationType operationType) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        if (trigger == null) {
            throw new IllegalArgumentException("trigger");
        }

        RxDocumentClientImpl.validateResource(trigger);

        String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Trigger,
            path, trigger, requestHeaders, options);

        return request;
    }

    /** Replaces an existing trigger (addressed by its self-link). */
    @Override
    public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options,
                                                                   DocumentClientRetryPolicy retryPolicyInstance) {

        try {
            if (trigger == null) {
                throw new IllegalArgumentException("trigger");
            }

            logger.debug("Replacing a Trigger. trigger id [{}]", trigger.getId());
            RxDocumentClientImpl.validateResource(trigger);

            String path = Utils.joinPath(trigger.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.Trigger, path, trigger, requestHeaders, options);

            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.replace(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, Trigger.class));

        } catch (Exception e) {
            logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes the trigger at the given link. */
    @Override
    public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    // NOTE: method body continues past this chunk boundary (the debug string is completed there).
    private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options,
                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(triggerLink)) {
                throw new IllegalArgumentException("triggerLink");
            }

            logger.debug("Deleting a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, FeedOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { 
logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. 
udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, FeedOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, FeedOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, 
options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, FeedOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, FeedOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, FeedOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. 
offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(FeedOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.getMaxItemCount() != null ? options.getMaxItemCount() : -1; final FeedOptions finalFeedOptions = options; BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override 
// Queries offers (throughput definitions) using a raw SQL string; delegates to the
// SqlQuerySpec overload.
public Flux<FeedResponse<Offer>> queryOffers(String query, FeedOptions options) {
    return queryOffers(new SqlQuerySpec(query), options);
}

@Override
public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, FeedOptions options) {
    // Offers are not scoped under a parent resource, hence the null parent resource link.
    return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer);
}

@Override
public Mono<DatabaseAccount> getDatabaseAccount() {
    // A fresh retry policy instance per call; inlineIfPossibleAsObs re-invokes the
    // inner read under that policy on retryable failures.
    DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy),
            documentClientRetryPolicy);
}

// Issues the actual DatabaseAccount read; synchronous failures are converted into an
// error Mono so callers always observe errors reactively.
private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) {
    try {
        logger.debug("Getting Database Account");
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null);
        return this.read(request, documentClientRetryPolicy).map(response -> toDatabaseAccount(response));
    } catch (Exception e) {
        logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

public Object getSession() {
    return this.sessionContainer;
}

// NOTE(review): unchecked downcast — callers must pass a SessionContainer instance.
public void setSession(Object sessionContainer) {
    this.sessionContainer = (SessionContainer) sessionContainer;
}

public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
    return partitionKeyRangeCache;
}

// Reads the DatabaseAccount from a specific endpoint (overriding the default one),
// always through the gateway proxy, and records whether multi-write locations are
// effectively enabled for this client.
public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) {
    return Flux.defer(() -> {
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DatabaseAccount, "", null, (Object) null);
        this.populateHeaders(request, RequestVerb.GET);
        request.setEndpointOverride(endpoint);
        return this.gatewayProxy.processMessage(request).doOnError(e -> {
            String message = String.format("Failed to retrieve database account information. %s",
                    e.getCause() != null ? e.getCause().toString() : e.toString());
            logger.warn(message);
        }).map(rsp -> rsp.getResource(DatabaseAccount.class))
                .doOnNext(databaseAccount -> {
                    // Multi-write is effective only when both the client's connection
                    // policy and the service-side account flag allow it.
                    this.useMultipleWriteLocations = this.connectionPolicy.isUsingMultipleWriteLocations() &&
                            BridgeInternal.isEnableMultipleWriteLocations(databaseAccount);
                });
    });
}

/**
 * Certain requests must be routed through gateway even when the client connectivity mode is direct.
 *
 * @param request the service request being dispatched
 * @return RxStoreModel the gateway proxy or the direct store model
 */
private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) {
    if (request.UseGatewayMode) {
        return this.gatewayProxy;
    }

    ResourceType resourceType = request.getResourceType();
    OperationType operationType = request.getOperationType();

    // Offers, scripts (except script execution) and partition key ranges are
    // gateway-only. Note: '&&' binds tighter than '||', so the middle term reads
    // (isScript() && operationType != ExecuteJavaScript).
    if (resourceType == ResourceType.Offer ||
        resourceType.isScript() && operationType != OperationType.ExecuteJavaScript ||
        resourceType == ResourceType.PartitionKeyRange) {
        return this.gatewayProxy;
    }

    if (operationType == OperationType.Create
            || operationType == OperationType.Upsert) {
        // Metadata-resource writes go through gateway; data-plane writes go direct.
        if (resourceType == ResourceType.Database ||
            resourceType == ResourceType.User ||
            resourceType == ResourceType.DocumentCollection ||
            resourceType == ResourceType.Permission) {
            return this.gatewayProxy;
        } else {
            return this.storeModel;
        }
    } else if (operationType == OperationType.Delete) {
        if (resourceType == ResourceType.Database ||
            resourceType == ResourceType.User ||
            resourceType == ResourceType.DocumentCollection) {
            return this.gatewayProxy;
        } else {
            return this.storeModel;
        }
    } else if (operationType == OperationType.Replace) {
        if (resourceType == ResourceType.DocumentCollection) {
            return this.gatewayProxy;
        } else {
            return this.storeModel;
        }
    } else if (operationType == OperationType.Read) {
        if (resourceType == ResourceType.DocumentCollection) {
            return this.gatewayProxy;
        } else {
            return this.storeModel;
        }
    } else {
        // Queries against collection children go through gateway when no partition
        // key range identity has been resolved yet; otherwise they go direct.
        if ((request.getOperationType() == OperationType.Query ||
                request.getOperationType() == OperationType.SqlQuery) &&
                Utils.isCollectionChild(request.getResourceType())) {
            if (request.getPartitionKeyRangeIdentity() == null) {
                return this.gatewayProxy;
            }
        }

        return this.storeModel;
    }
}

// Best-effort shutdown: each resource is closed independently so one failure does
// not prevent the others from being released.
@Override
public void close() {
    logger.info("Shutting down ...");
    logger.info("Closing Global Endpoint Manager ...");
    LifeCycleUtils.closeQuietly(this.globalEndpointManager);
    logger.info("Closing StoreClientFactory ...");
    LifeCycleUtils.closeQuietly(this.storeClientFactory);
    logger.info("Shutting down reactorHttpClient ...");
    try {
        this.reactorHttpClient.shutdown();
    } catch (Exception e) {
        logger.warn("shutting down reactorHttpClient failed", e);
    }
    logger.info("Shutting down completed.");
}
}
This is acceptable for now, but the finite type node's position should be set to span only the finite type elements: the starting position should be the first element's starting position, and the ending position should be the last element's ending position.
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) { List<TypeDescriptorNode> nodes = flattenUnionType(unionTypeDescriptorNode); List<TypeDescriptorNode> finiteTypeElements = new ArrayList<>(); List<List<TypeDescriptorNode>> unionTypeElementsCollection = new ArrayList<>(); for (TypeDescriptorNode type : nodes) { if (type.kind() == SyntaxKind.SINGLETON_TYPE_DESC) { finiteTypeElements.add(type); unionTypeElementsCollection.add(new ArrayList<>()); } else { List<TypeDescriptorNode> lastOfOthers; if (unionTypeElementsCollection.isEmpty()) { lastOfOthers = new ArrayList<>(); unionTypeElementsCollection.add(lastOfOthers); } else { lastOfOthers = unionTypeElementsCollection.get(unionTypeElementsCollection.size() - 1); } lastOfOthers.add(type); } } List<TypeDescriptorNode> unionElements = new ArrayList<>(); reverseFlatMap(unionTypeElementsCollection, unionElements); BLangFiniteTypeNode bLangFiniteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); for (TypeDescriptorNode finiteTypeEl : finiteTypeElements) { SingletonTypeDescriptorNode singletonTypeNode = (SingletonTypeDescriptorNode) finiteTypeEl; BLangLiteral literal = createSimpleLiteral(singletonTypeNode.simpleContExprNode(), true); bLangFiniteTypeNode.addValue(literal); } if (unionElements.isEmpty()) { return bLangFiniteTypeNode; } BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.pos = getPosition(unionTypeDescriptorNode); for (TypeDescriptorNode unionElement : unionElements) { unionTypeNode.memberTypeNodes.add(createTypeNode(unionElement)); } bLangFiniteTypeNode.setPosition(unionTypeNode.pos); if (!finiteTypeElements.isEmpty()) { unionTypeNode.memberTypeNodes.add(deSugarTypeAsUserDefType(bLangFiniteTypeNode)); } return unionTypeNode; }
bLangFiniteTypeNode.setPosition(unionTypeNode.pos);
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) { List<TypeDescriptorNode> nodes = flattenUnionType(unionTypeDescriptorNode); List<TypeDescriptorNode> finiteTypeElements = new ArrayList<>(); List<List<TypeDescriptorNode>> unionTypeElementsCollection = new ArrayList<>(); for (TypeDescriptorNode type : nodes) { if (type.kind() == SyntaxKind.SINGLETON_TYPE_DESC) { finiteTypeElements.add(type); unionTypeElementsCollection.add(new ArrayList<>()); } else { List<TypeDescriptorNode> lastOfOthers; if (unionTypeElementsCollection.isEmpty()) { lastOfOthers = new ArrayList<>(); unionTypeElementsCollection.add(lastOfOthers); } else { lastOfOthers = unionTypeElementsCollection.get(unionTypeElementsCollection.size() - 1); } lastOfOthers.add(type); } } List<TypeDescriptorNode> unionElements = new ArrayList<>(); reverseFlatMap(unionTypeElementsCollection, unionElements); BLangFiniteTypeNode bLangFiniteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); for (TypeDescriptorNode finiteTypeEl : finiteTypeElements) { SingletonTypeDescriptorNode singletonTypeNode = (SingletonTypeDescriptorNode) finiteTypeEl; BLangLiteral literal = createSimpleLiteral(singletonTypeNode.simpleContExprNode(), true); bLangFiniteTypeNode.addValue(literal); } if (unionElements.isEmpty()) { return bLangFiniteTypeNode; } BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.pos = getPosition(unionTypeDescriptorNode); for (TypeDescriptorNode unionElement : unionElements) { unionTypeNode.memberTypeNodes.add(createTypeNode(unionElement)); } bLangFiniteTypeNode.setPosition(unionTypeNode.pos); if (!finiteTypeElements.isEmpty()) { unionTypeNode.memberTypeNodes.add(deSugarTypeAsUserDefType(bLangFiniteTypeNode)); } return unionTypeNode; }
class BLangNodeTransformer extends NodeTransformer<BLangNode> { private static final String IDENTIFIER_LITERAL_PREFIX = "'"; private BLangDiagnosticLog dlog; private SymbolTable symTable; private PackageCache packageCache; private PackageID packageID; private String currentCompUnitName; private BLangCompilationUnit currentCompilationUnit; private BLangAnonymousModelHelper anonymousModelHelper; private BLangMissingNodesHelper missingNodesHelper; /* To keep track of additional statements produced from multi-BLangNode resultant transformations */ private Stack<BLangStatement> additionalStatements = new Stack<>(); /* To keep track if we are inside a block statment for the use of type definition creation */ private boolean isInLocalContext = false; public BLangNodeTransformer(CompilerContext context, PackageID packageID, String entryName) { this.dlog = BLangDiagnosticLog.getInstance(context); this.dlog.setCurrentPackageId(packageID); this.symTable = SymbolTable.getInstance(context); this.packageID = packageID; this.currentCompUnitName = entryName; this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); } public List<org.ballerinalang.model.tree.Node> accept(Node node) { BLangNode bLangNode = node.apply(this); List<org.ballerinalang.model.tree.Node> nodes = new ArrayList<>(); while (!additionalStatements.empty()) { nodes.add(additionalStatements.pop()); } nodes.add(bLangNode); return nodes; } @Override public BLangNode transform(IdentifierToken identifierToken) { return this.createIdentifier(getPosition(identifierToken), identifierToken); } private Optional<Node> getDocumentationString(Optional<MetadataNode> metadataNode) { return metadataNode.map(MetadataNode::documentationString).orElse(null); } private NodeList<AnnotationNode> getAnnotations(Optional<MetadataNode> metadataNode) { return metadataNode.map(MetadataNode::annotations).orElse(null); } private Location 
getPosition(Node node) { if (node == null) { return null; } LineRange lineRange = node.lineRange(); LinePosition startPos = lineRange.startLine(); LinePosition endPos = lineRange.endLine(); return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(), startPos.offset(), endPos.offset()); } private Location getPosition(Node startNode, Node endNode) { if (startNode == null || endNode == null) { return null; } LinePosition startPos = startNode.lineRange().startLine(); LinePosition endPos = endNode.lineRange().endLine(); return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(), startPos.offset(), endPos.offset()); } private Location getPositionWithoutMetadata(Node node) { if (node == null) { return null; } LineRange nodeLineRange = node.lineRange(); NonTerminalNode nonTerminalNode = (NonTerminalNode) node; ChildNodeList children = nonTerminalNode.children(); LinePosition startPos; if (children.get(0).kind() == SyntaxKind.METADATA) { startPos = children.get(1).lineRange().startLine(); } else { startPos = nodeLineRange.startLine(); } LinePosition endPos = nodeLineRange.endLine(); return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(), startPos.offset(), endPos.offset()); } @Override public BLangNode transform(ModulePartNode modulePart) { BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit(); this.currentCompilationUnit = compilationUnit; compilationUnit.name = currentCompUnitName; compilationUnit.setPackageID(packageID); Location pos = getPosition(modulePart); for (ImportDeclarationNode importDecl : modulePart.imports()) { BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this); bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName()); compilationUnit.addTopLevelNode(bLangImport); } for (ModuleMemberDeclarationNode member : modulePart.members()) { compilationUnit.addTopLevelNode((TopLevelNode) 
member.apply(this)); } Location newLocation = new BLangDiagnosticLocation(pos.lineRange().filePath(), 0, 0, 0, 0); compilationUnit.pos = newLocation; compilationUnit.setPackageID(packageID); this.currentCompilationUnit = null; return compilationUnit; } @Override public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) { TypedBindingPatternNode typedBindingPattern = modVarDeclrNode.typedBindingPattern(); BindingPatternNode bindingPatternNode = typedBindingPattern.bindingPattern(); BLangVariable variable = getBLangVariableNode(bindingPatternNode); if (modVarDeclrNode.visibilityQualifier().isPresent()) { markVariableWithFlag(variable, Flag.PUBLIC); } initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), modVarDeclrNode.initializer(), modVarDeclrNode.qualifiers()); NodeList<AnnotationNode> annotations = getAnnotations(modVarDeclrNode.metadata()); if (annotations != null) { variable.annAttachments = applyAll(annotations); } variable.pos = getPositionWithoutMetadata(modVarDeclrNode); variable.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(modVarDeclrNode.metadata())); return variable; } @Override public BLangNode transform(ImportDeclarationNode importDeclaration) { ImportOrgNameNode orgNameNode = importDeclaration.orgName().orElse(null); Optional<ImportPrefixNode> prefixNode = importDeclaration.prefix(); Token prefix = prefixNode.isPresent() ? 
prefixNode.get().prefix() : null; Token orgName = null; if (orgNameNode != null) { orgName = orgNameNode.orgName(); } String version = null; List<BLangIdentifier> pkgNameComps = new ArrayList<>(); NodeList<IdentifierToken> names = importDeclaration.moduleName(); Location position = getPosition(importDeclaration); names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null))); BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode(); importDcl.pos = position; importDcl.pkgNameComps = pkgNameComps; importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName); importDcl.version = this.createIdentifier(null, version); importDcl.alias = (prefix != null) ? this.createIdentifier(getPosition(prefix), prefix) : pkgNameComps.get(pkgNameComps.size() - 1); return importDcl; } @Override public BLangNode transform(MethodDeclarationNode methodDeclarationNode) { BLangFunction bLFunction; if (methodDeclarationNode.relativeResourcePath().isEmpty()) { bLFunction = createFunctionNode(methodDeclarationNode.methodName(), methodDeclarationNode.qualifierList(), methodDeclarationNode.methodSignature(), null); } else { bLFunction = createResourceFunctionNode(methodDeclarationNode.methodName(), methodDeclarationNode.qualifierList(), methodDeclarationNode.relativeResourcePath(), methodDeclarationNode.methodSignature(), null); } bLFunction.annAttachments = applyAll(getAnnotations(methodDeclarationNode.metadata())); bLFunction.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(methodDeclarationNode.metadata())); bLFunction.pos = getPositionWithoutMetadata(methodDeclarationNode); return bLFunction; } @Override public BLangNode transform(ResourcePathParameterNode resourcePathParameterNode) { BLangSimpleVariable pathParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode(); pathParam.name = createIdentifier(resourcePathParameterNode.paramName()); BLangType 
typeNode = (BLangType) resourcePathParameterNode.typeDescriptor().apply(this); pathParam.pos = getPosition(resourcePathParameterNode); pathParam.annAttachments = applyAll(resourcePathParameterNode.annotations()); if (resourcePathParameterNode.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) { BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode(); arrayTypeNode.elemtype = typeNode; arrayTypeNode.dimensions = 1; typeNode = arrayTypeNode; } pathParam.typeNode = typeNode; return pathParam; } private BLangFunction createResourceFunctionNode(IdentifierToken accessorName, NodeList<Token> qualifierList, NodeList<Node> relativeResourcePath, FunctionSignatureNode methodSignature, FunctionBodyNode functionBody) { BLangResourceFunction bLFunction = (BLangResourceFunction) TreeBuilder.createResourceFunctionNode(); String resourceFuncName = calculateResourceFunctionName(accessorName, relativeResourcePath); BLangIdentifier name = createIdentifier(getPosition(accessorName), resourceFuncName); populateFunctionNode(name, qualifierList, methodSignature, functionBody, bLFunction); bLFunction.methodName = createIdentifier(accessorName); bLFunction.resourcePath = new ArrayList<>(); List<BLangSimpleVariable> params = new ArrayList<>(); for (Node pathSegment : relativeResourcePath) { switch (pathSegment.kind()) { case SLASH_TOKEN: continue; case RESOURCE_PATH_SEGMENT_PARAM: BLangSimpleVariable param = (BLangSimpleVariable) pathSegment.apply(this); params.add(param); bLFunction.addPathParam(param); bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "*")); break; case RESOURCE_PATH_REST_PARAM: BLangSimpleVariable restParam = (BLangSimpleVariable) pathSegment.apply(this); params.add(restParam); bLFunction.setRestPathParam(restParam); bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "**")); break; default: bLFunction.resourcePath.add(createIdentifier((Token) pathSegment)); break; } } bLFunction.getParameters().addAll(0, 
params); return bLFunction; } private String calculateResourceFunctionName(IdentifierToken accessorName, NodeList<Node> relativeResourcePath) { StringBuilder sb = new StringBuilder(); sb.append("$"); sb.append(createIdentifier(accessorName).getValue()); for (Node token : relativeResourcePath) { switch (token.kind()) { case SLASH_TOKEN: continue; case RESOURCE_PATH_SEGMENT_PARAM: sb.append("$*"); break; case RESOURCE_PATH_REST_PARAM: sb.append("$**"); break; default: sb.append("$"); String value = createIdentifier((Token) token).getValue(); sb.append(value); } } return sb.toString(); } @Override public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) { BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode(); Location pos = getPositionWithoutMetadata(constantDeclarationNode); Location identifierPos = getPosition(constantDeclarationNode.variableName()); constantNode.name = createIdentifier(identifierPos, constantDeclarationNode.variableName()); constantNode.expr = createExpression(constantDeclarationNode.initializer()); constantNode.pos = pos; if (constantDeclarationNode.typeDescriptor().isPresent()) { constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor().orElse(null)); } constantNode.annAttachments = applyAll(getAnnotations(constantDeclarationNode.metadata())); constantNode.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(constantDeclarationNode.metadata())); constantNode.flagSet.add(Flag.CONSTANT); if (constantDeclarationNode.visibilityQualifier().isPresent() && constantDeclarationNode.visibilityQualifier().orElse(null).kind() == SyntaxKind.PUBLIC_KEYWORD) { constantNode.flagSet.add(Flag.PUBLIC); } NodeKind nodeKind = constantNode.expr.getKind(); if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { BLangLiteral literal = nodeKind == NodeKind.LITERAL ? 
(BLangLiteral) TreeBuilder.createLiteralExpression() : (BLangLiteral) TreeBuilder.createNumericLiteralExpression(); literal.setValue(((BLangLiteral) constantNode.expr).value); literal.setBType(constantNode.expr.getBType()); literal.isConstant = true; BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); finiteTypeNode.valueSpace.add(literal); BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName); typeDef.setName(anonTypeGenName); typeDef.flagSet.add(Flag.PUBLIC); typeDef.flagSet.add(Flag.ANONYMOUS); typeDef.typeNode = finiteTypeNode; typeDef.pos = pos; constantNode.associatedTypeDefinition = typeDef; } return constantNode; } public BLangNode transform(TypeDefinitionNode typeDefNode) { BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); BLangIdentifier identifierNode = this.createIdentifier(typeDefNode.typeName()); typeDef.setName(identifierNode); typeDef.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(typeDefNode.metadata())); typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor()); typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> { if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) { typeDef.flagSet.add(Flag.PUBLIC); } }); typeDef.pos = getPositionWithoutMetadata(typeDefNode); typeDef.annAttachments = applyAll(getAnnotations(typeDefNode.metadata())); return typeDef; } @Override private List<TypeDescriptorNode> flattenUnionType(UnionTypeDescriptorNode unionTypeDescriptorNode) { List<TypeDescriptorNode> list = new ArrayList<>(); flattenUnionType(list, unionTypeDescriptorNode); return list; } private void flattenUnionType(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescriptorNode) { if (typeDescriptorNode.kind() != 
SyntaxKind.UNION_TYPE_DESC) { list.add(typeDescriptorNode); return; } UnionTypeDescriptorNode unionTypeDescriptorNode = (UnionTypeDescriptorNode) typeDescriptorNode; updateListWithNonUnionTypes(list, unionTypeDescriptorNode.leftTypeDesc()); updateListWithNonUnionTypes(list, unionTypeDescriptorNode.rightTypeDesc()); } private void updateListWithNonUnionTypes(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescNode) { if (typeDescNode.kind() != SyntaxKind.UNION_TYPE_DESC) { list.add(typeDescNode); } else { flattenUnionType(list, typeDescNode); } } private <T> void reverseFlatMap(List<List<T>> listOfLists, List<T> result) { for (int i = listOfLists.size() - 1; i >= 0; i--) { result.addAll(listOfLists.get(i)); } } private BLangUserDefinedType deSugarTypeAsUserDefType(BLangType toIndirect) { BLangTypeDefinition bLTypeDef = createTypeDefinitionWithTypeNode(toIndirect); Location pos = toIndirect.pos; addToTop(bLTypeDef); return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), bLTypeDef.name); } private BLangTypeDefinition createTypeDefinitionWithTypeNode(BLangType toIndirect) { Location pos = toIndirect.pos; BLangTypeDefinition bLTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos, genName); bLTypeDef.setName(anonTypeGenName); bLTypeDef.flagSet.add(Flag.PUBLIC); bLTypeDef.flagSet.add(Flag.ANONYMOUS); bLTypeDef.typeNode = toIndirect; bLTypeDef.pos = pos; return bLTypeDef; } @Override public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) { BLangType typeNode = createTypeNode(parenthesisedTypeDescriptorNode.typedesc()); typeNode.grouped = true; return typeNode; } @Override public BLangNode transform(TypeParameterNode typeParameterNode) { return createTypeNode(typeParameterNode.typeNode()); } @Override public BLangNode 
transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) { BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode(); SeparatedNodeList<Node> types = tupleTypeDescriptorNode.memberTypeDesc(); for (int i = 0; i < types.size(); i++) { Node node = types.get(i); if (node.kind() == SyntaxKind.REST_TYPE) { RestDescriptorNode restDescriptor = (RestDescriptorNode) node; tupleTypeNode.restParamType = createTypeNode(restDescriptor.typeDescriptor()); } else { tupleTypeNode.memberTypeNodes.add(createTypeNode(node)); } } tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode); return tupleTypeNode; } @Override public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) { if (parameterizedTypeDescNode.kind() == SyntaxKind.ERROR_TYPE_DESC) { return transformErrorTypeDescriptor(parameterizedTypeDescNode); } BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = getParameterizedTypeKind(parameterizedTypeDescNode.kind()); refType.pos = getPosition(parameterizedTypeDescNode); Optional<TypeParameterNode> typeParam = parameterizedTypeDescNode.typeParamNode(); if (typeParam.isPresent()) { BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode(); constrainedType.type = refType; constrainedType.constraint = createTypeNode(typeParam.get().typeNode()); constrainedType.pos = refType.pos; return constrainedType; } return refType; } private TypeKind getParameterizedTypeKind(SyntaxKind syntaxKind) { switch (syntaxKind) { case TYPEDESC_TYPE_DESC: return TypeKind.TYPEDESC; case FUTURE_TYPE_DESC: return TypeKind.FUTURE; case XML_TYPE_DESC: default: return TypeKind.XML; } } private BLangNode transformErrorTypeDescriptor(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) { BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode(); Optional<TypeParameterNode> typeParam = 
parameterizedTypeDescNode.typeParamNode(); errorType.pos = getPosition(parameterizedTypeDescNode); if (typeParam.isPresent()) { TypeParameterNode typeNode = typeParam.get(); errorType.detailType = createTypeNode(typeNode); } NonTerminalNode parent = parameterizedTypeDescNode.parent(); boolean isDistinctError = parent.kind() == SyntaxKind.DISTINCT_TYPE_DESC; if (isDistinctError) { parent = parent.parent(); } errorType.isAnonymous = checkIfAnonymous(parameterizedTypeDescNode); errorType.isLocal = this.isInLocalContext; if (parent.kind() != SyntaxKind.TYPE_DEFINITION && (isDistinctError || (!errorType.isLocal && typeParam.isPresent()))) { return deSugarTypeAsUserDefType(errorType); } return errorType; } private boolean isAnonymousTypeNode(TypeParameterNode typeNode) { SyntaxKind paramKind = typeNode.typeNode().kind(); if (paramKind == SyntaxKind.RECORD_TYPE_DESC || paramKind == SyntaxKind.OBJECT_TYPE_DESC || paramKind == SyntaxKind.ERROR_TYPE_DESC) { return checkIfAnonymous(typeNode); } return false; } @Override public BLangNode transform(DistinctTypeDescriptorNode distinctTypeDesc) { BLangType typeNode = createTypeNode(distinctTypeDesc.typeDescriptor()); typeNode.flagSet.add(Flag.DISTINCT); return typeNode; } @Override public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) { BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode(); for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) { SyntaxKind kind = qualifier.kind(); if (kind == SyntaxKind.CLIENT_KEYWORD) { objectTypeNode.flagSet.add(Flag.CLIENT); continue; } if (kind == SyntaxKind.SERVICE_KEYWORD) { objectTypeNode.flagSet.add(SERVICE); continue; } if (kind == SyntaxKind.ISOLATED_KEYWORD) { objectTypeNode.flagSet.add(ISOLATED); continue; } throw new RuntimeException("Syntax kind is not supported: " + kind); } NodeList<Node> members = objTypeDescNode.members(); for (Node node : members) { BLangNode bLangNode = node.apply(this); if (bLangNode.getKind() == 
NodeKind.FUNCTION) { BLangFunction bLangFunction = (BLangFunction) bLangNode; bLangFunction.attachedFunction = true; bLangFunction.flagSet.add(Flag.ATTACHED); if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) { if (objectTypeNode.initFunction == null) { bLangFunction.objInitFunction = true; objectTypeNode.initFunction = bLangFunction; } else { objectTypeNode.addFunction(bLangFunction); } } else { objectTypeNode.addFunction(bLangFunction); } } else if (bLangNode.getKind() == NodeKind.RESOURCE_FUNC) { BLangFunction bLangFunction = (BLangFunction) bLangNode; bLangFunction.attachedFunction = true; bLangFunction.flagSet.add(Flag.ATTACHED); objectTypeNode.addFunction(bLangFunction); dlog.error(getPosition(node), DiagnosticErrorCode.OBJECT_TYPE_DEF_DOES_NOT_ALLOW_RESOURCE_FUNC_DECL); } else if (bLangNode.getKind() == NodeKind.VARIABLE) { objectTypeNode.addField((BLangSimpleVariable) bLangNode); } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) { objectTypeNode.addTypeReference((BLangType) bLangNode); } } objectTypeNode.pos = getPosition(objTypeDescNode); if (members.size() > 0) { objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(members.get(0))); objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(members.get(members.size() - 1))); } else { objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(objTypeDescNode.closeBrace())); objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(objTypeDescNode.openBrace())); } boolean isAnonymous = checkIfAnonymous(objTypeDescNode); objectTypeNode.isAnonymous = isAnonymous; if (!isAnonymous) { return objectTypeNode; } return deSugarTypeAsUserDefType(objectTypeNode); } public BLangClassDefinition transformObjectCtorExpressionBody(NodeList<Node> members) { BLangClassDefinition classDefinition = (BLangClassDefinition) TreeBuilder.createClassDefNode(); classDefinition.flagSet.add(Flag.ANONYMOUS); classDefinition.flagSet.add(Flag.OBJECT_CTOR); for (Node node : 
members) { BLangNode bLangNode = node.apply(this); NodeKind nodeKind = bLangNode.getKind(); if (nodeKind == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) { BLangFunction bLangFunction = (BLangFunction) bLangNode; bLangFunction.attachedFunction = true; bLangFunction.flagSet.add(Flag.ATTACHED); if (!Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) { classDefinition.addFunction(bLangFunction); continue; } if (classDefinition.initFunction != null) { classDefinition.addFunction(bLangFunction); continue; } if (bLangFunction.requiredParams.size() != 0) { dlog.error(bLangFunction.pos, DiagnosticErrorCode.OBJECT_CTOR_INIT_CANNOT_HAVE_PARAMETERS); continue; } bLangFunction.objInitFunction = true; classDefinition.initFunction = bLangFunction; } else if (nodeKind == NodeKind.VARIABLE) { classDefinition.addField((BLangSimpleVariable) bLangNode); } else if (nodeKind == NodeKind.USER_DEFINED_TYPE) { dlog.error(bLangNode.pos, DiagnosticErrorCode.OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS); } } classDefinition.internal = true; return classDefinition; } /** * Object constructor expression creates a class definition for the type defined through the object constructor. * Then add the class definition as a top level node. Using the class definition initialize the object defined in * the object constructor. Therefore this can be considered as a desugar. 
 * example:
 * var objVariable = object { int n; };
 *
 * class anonType0 { int n; }
 * var objVariable = new anonType0();
 *
 * @param objectConstructorExpressionNode object ctor expression node
 * @return BLangTypeInit node which initialize the class definition
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
    Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
    // Desugar the object-constructor body into an anonymous class definition.
    BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
    anonClass.pos = pos;
    BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
    objectCtorExpression.pos = pos;
    objectCtorExpression.classNode = anonClass;
    // Generate a unique name for the anonymous class and hoist it to the top level.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClass.setName(anonTypeGenName);
    anonClass.flagSet.add(Flag.PUBLIC);
    Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
    typeReference.ifPresent(typeReferenceNode -> {
        objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
    });
    anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
    addToTop(anonClass);
    // Copy the object-type qualifiers (client/isolated/service) onto the generated class.
    NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
    for (Token qualifier : objectConstructorQualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            anonClass.flagSet.add(Flag.CLIENT);
            objectCtorExpression.isClient = true;
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            anonClass.flagSet.add(Flag.ISOLATED);
        } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
            anonClass.flagSet.add(SERVICE);
            objectCtorExpression.isService = true;
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }
    // Build the `new anonType0()` initializer that instantiates the generated class.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    objectCtorExpression.typeInit = initNode;
    return objectCtorExpression;
}

// Transforms an object field declaration into a BLangSimpleVariable, carrying over
// visibility, qualifiers (final/resource) and markdown documentation.
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
            objFieldNode.expression().orElse(null), objFieldNode.visibilityQualifier().orElse(null),
            getAnnotations(objFieldNode.metadata()));
    Optional<Node> doc = getDocumentationString(objFieldNode.metadata());
    simpleVar.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
    NodeList<Token> qualifierList = objFieldNode.qualifierList();
    for (Token token : qualifierList) {
        if (token.kind() == SyntaxKind.FINAL_KEYWORD) {
            addFinalQualifier(simpleVar);
        } else if (token.kind() == SyntaxKind.RESOURCE_KEYWORD) {
            addResourceQualifier(simpleVar);
        }
    }
    simpleVar.flagSet.add(Flag.FIELD);
    simpleVar.pos = getPositionWithoutMetadata(objFieldNode);
    return simpleVar;
}

// Marks the given variable as a `resource` field.
private void addResourceQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.RESOURCE);
}

// Transforms an expression-bodied function body (`=> expr`) into a BLangExprFunctionBody.
@Override
public BLangNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
    BLangExprFunctionBody bLExprFunctionBody = (BLangExprFunctionBody) TreeBuilder.createExprFunctionBodyNode();
    bLExprFunctionBody.expr = createExpression(expressionFunctionBodyNode.expression());
    bLExprFunctionBody.pos = getPosition(expressionFunctionBodyNode);
    return bLExprFunctionBody;
}

// Transforms a record type descriptor, collecting fields, type references and the
// optional rest descriptor; anonymous records at module level get a generated type.
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    boolean hasRestField = false;
    boolean isAnonymous = checkIfAnonymous(recordTypeDescriptorNode);
    for (Node field : recordTypeDescriptorNode.fields()) {
        if (field.kind() == SyntaxKind.RECORD_FIELD) {
            BLangSimpleVariable bLFiled = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldNode) field).metadata());
            bLFiled.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(bLFiled);
        } else if (field.kind() == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
            BLangSimpleVariable bLFiled = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldWithDefaultValueNode) field).metadata());
            bLFiled.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(bLFiled);
        } else {
            // Remaining kinds are type references (record type inclusions).
            recordTypeNode.addTypeReference(createTypeNode(field));
        }
    }
    Optional<RecordRestDescriptorNode> recordRestDesc = recordTypeDescriptorNode.recordRestDescriptor();
    if (recordRestDesc.isPresent()) {
        recordTypeNode.restFieldType = createTypeNode(recordRestDesc.get());
        hasRestField = true;
    }
    // `{` (open) vs `{|` (closed) delimiter decides sealedness together with a rest field.
    boolean isOpen = recordTypeDescriptorNode.bodyStartDelimiter().kind() == SyntaxKind.OPEN_BRACE_TOKEN;
    recordTypeNode.sealed = !(hasRestField || isOpen);
    recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
    recordTypeNode.isAnonymous = isAnonymous;
    recordTypeNode.isLocal = this.isInLocalContext;
    if (!isAnonymous || this.isInLocalContext) {
        return recordTypeNode;
    }
    return createAnonymousRecordType(recordTypeDescriptorNode, recordTypeNode);
}

// Transforms a singleton type descriptor (e.g. `5`) into a finite type with one value.
@Override
public BLangNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    BLangFiniteTypeNode bLangFiniteTypeNode = new BLangFiniteTypeNode();
    BLangLiteral simpleLiteral = createSimpleLiteral(singletonTypeDescriptorNode.simpleContExprNode());
    bLangFiniteTypeNode.pos = simpleLiteral.pos;
    bLangFiniteTypeNode.valueSpace.add(simpleLiteral);
    return bLangFiniteTypeNode;
}

@Override
public BLangNode transform(BuiltinSimpleNameReferenceNode singletonTypeDescriptorNode) {
    return createTypeNode(singletonTypeDescriptorNode);
}

@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
    return createTypeNode(typeReferenceNode.typeName());
}

// Transforms a record field (optional `?` or required) into a BLangSimpleVariable.
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (recordFieldNode.questionMarkToken().isPresent()) {
        simpleVar.flagSet.add(Flag.OPTIONAL);
    } else {
        simpleVar.flagSet.add(Flag.REQUIRED);
    }
    simpleVar.flagSet.add(Flag.FIELD);
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}

// Transforms a record field that carries a default value expression.
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (isPresent(recordFieldNode.expression())) {
        simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
    }
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}

// Adds the READONLY flag when the `readonly` keyword is present.
private void addReadOnlyQualifier(Optional<Token> readonlyKeyword, BLangSimpleVariable simpleVar) {
    if (readonlyKeyword.isPresent()) {
        simpleVar.flagSet.add(Flag.READONLY);
    }
}

// A record rest descriptor (`T...`) contributes only its type node.
@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
    return createTypeNode(recordFieldNode.typeName());
}

// Transforms a function definition; resource functions (with a relative resource
// path) take a separate construction path.
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
    BLangFunction bLFunction;
    if (funcDefNode.relativeResourcePath().isEmpty()) {
        bLFunction = createFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                funcDefNode.functionSignature(), funcDefNode.functionBody());
    } else {
        bLFunction = createResourceFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                funcDefNode.relativeResourcePath(), funcDefNode.functionSignature(), funcDefNode.functionBody());
    }
    bLFunction.annAttachments = applyAll(getAnnotations(funcDefNode.metadata()));
    bLFunction.pos = getPositionWithoutMetadata(funcDefNode);
    bLFunction.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(funcDefNode.metadata()));
    return bLFunction;
}

// Creates a plain (non-resource) BLangFunction from its name, qualifiers, signature and body.
private BLangFunction createFunctionNode(IdentifierToken funcName, NodeList<Token> qualifierList,
                                         FunctionSignatureNode functionSignature, FunctionBodyNode functionBody) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    BLangIdentifier name = createIdentifier(getPosition(funcName), funcName);
    populateFunctionNode(name, qualifierList, functionSignature, functionBody, bLFunction);
    return bLFunction;
}

// Fills name, qualifiers, signature and body into the given function node; a null
// body marks an interface (body-less) function, an external body marks it NATIVE.
private void populateFunctionNode(BLangIdentifier name, NodeList<Token> qualifierList,
                                  FunctionSignatureNode functionSignature, FunctionBodyNode functionBody,
                                  BLangFunction bLFunction) {
    bLFunction.name = name;
    setFunctionQualifiers(bLFunction, qualifierList);
    populateFuncSignature(bLFunction, functionSignature);
    if (functionBody == null) {
        bLFunction.body = null;
        bLFunction.flagSet.add(Flag.INTERFACE);
        bLFunction.interfaceFunction = true;
    } else {
        bLFunction.body = (BLangFunctionBody) functionBody.apply(this);
        if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
            bLFunction.flagSet.add(Flag.NATIVE);
        }
    }
}

// Maps function qualifier keywords to the corresponding BLang flags; unknown
// qualifiers are skipped.
private void setFunctionQualifiers(BLangFunction bLFunction, NodeList<Token> qualifierList) {
    for (Token qualifier : qualifierList) {
        switch (qualifier.kind()) {
            case PUBLIC_KEYWORD:
                bLFunction.flagSet.add(Flag.PUBLIC);
                break;
            case PRIVATE_KEYWORD:
                bLFunction.flagSet.add(Flag.PRIVATE);
                break;
            case REMOTE_KEYWORD:
                bLFunction.flagSet.add(Flag.REMOTE);
                break;
            case TRANSACTIONAL_KEYWORD:
                bLFunction.flagSet.add(Flag.TRANSACTIONAL);
                break;
            case RESOURCE_KEYWORD:
                bLFunction.flagSet.add(Flag.RESOURCE);
                break;
            case ISOLATED_KEYWORD:
                bLFunction.flagSet.add(Flag.ISOLATED);
                break;
            default:
                continue;
        }
    }
}

// Transforms an `external` function body, preserving its annotation attachments.
@Override
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
    BLangExternalFunctionBody externFunctionBodyNode =
            (BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
    externFunctionBodyNode.annAttachments = applyAll(externalFunctionBodyNode.annotations());
    externFunctionBodyNode.pos = getPosition(externalFunctionBodyNode);
    return externFunctionBodyNode;
}

// Transforms an explicit anonymous function expression into a hoisted LAMBDA
// function wrapped in a BLangLambdaFunction.
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location pos = getPosition(anonFuncExprNode);
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());
    bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
    bLFunction.pos = pos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    setFunctionQualifiers(bLFunction, anonFuncExprNode.qualifierList());
    addToTop(bLFunction);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = pos;
    return lambdaExpr;
}

// Transforms a block function body; named worker declarations are emitted ahead of
// the normal statements (continued below).
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    this.isInLocalContext = true;
    List<BLangStatement> statements = new ArrayList<>();
    if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
        NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
        generateAndAddBLangStatements(namedWorkerDeclarator.workerInitStatements(), statements);
        for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
            statements.add((BLangStatement) workerDeclarationNode.apply(this));
            // Worker transformation pushes extra statements (the worker start invocation)
            // onto additionalStatements; drain them right after each declaration.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
        }
    }
    generateAndAddBLangStatements(functionBodyBlockNode.statements(), statements);
    bLFuncBody.stmts = statements;
    bLFuncBody.pos = getPosition(functionBodyBlockNode);
    this.isInLocalContext = false;
    return bLFuncBody;
}

// Transforms a foreach statement, including its typed binding pattern, collection
// expression, body block and optional on-fail clause.
@Override
public BLangNode transform(ForEachStatementNode forEachStatementNode) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.pos = getPosition(forEachStatementNode);
    TypedBindingPatternNode typedBindingPatternNode = forEachStatementNode.typedBindingPattern();
    VariableDefinitionNode variableDefinitionNode = createBLangVarDef(getPosition(typedBindingPatternNode),
            typedBindingPatternNode, Optional.empty(), Optional.empty());
    foreach.setVariableDefinitionNode(variableDefinitionNode);
    foreach.isDeclaredWithVar = typedBindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    BLangBlockStmt foreachBlock = (BLangBlockStmt) forEachStatementNode.blockStatement().apply(this);
    foreachBlock.pos = getPosition(forEachStatementNode.blockStatement());
    foreach.setBody(foreachBlock);
    foreach.setCollection(createExpression(forEachStatementNode.actionOrExpressionNode()));
    forEachStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        foreach.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return foreach;
}

// Transforms a fork statement into a BLangForkJoin node. The forked workers
// themselves are attached via the NamedWorkerDeclarationNode transformation.
@Override
public BLangNode transform(ForkStatementNode forkStatementNode) {
    BLangForkJoin forkJoin = (BLangForkJoin) TreeBuilder.createForkJoinNode();
    Location forkStmtPos = getPosition(forkStatementNode);
    forkJoin.pos = forkStmtPos;
    return forkJoin;
}

// Desugars a named worker declaration `worker w { ... }` into:
//   1. a hoisted anonymous WORKER lambda holding the worker body, and
//   2. a variable definition `w = start <lambda>()` pushed onto additionalStatements
//      for the caller (FunctionBodyBlockNode transform) to emit after this node.
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location workerBodyPos = getPosition(namedWorkerDeclNode.workerBody());
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    // Wrap the worker body block in a block function body.
    BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
    BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    bodyNode.stmts = blockStmt.stmts;
    bodyNode.pos = workerBodyPos;
    bLFunction.body = bodyNode;
    bLFunction.internal = true;
    bLFunction.pos = workerBodyPos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    bLFunction.addFlag(Flag.WORKER);
    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        bLFunction.addFlag(Flag.TRANSACTIONAL);
    }
    // Derive the worker's default name, handling missing names and quoted
    // (IDENTIFIER_LITERAL_PREFIX) identifiers.
    String workerName = namedWorkerDeclNode.workerName().text();
    if (namedWorkerDeclNode.workerName().isMissing() || workerName.equals(IDENTIFIER_LITERAL_PREFIX)) {
        workerName = missingNodesHelper.getNextMissingNodeName(packageID);
    }
    if (workerName.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        bLFunction.defaultWorkerName.originalValue = workerName;
        workerName = IdentifierUtils.unescapeUnicodeCodepoints(workerName.substring(1));
    }
    bLFunction.defaultWorkerName.value = workerName;
    bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());
    NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
    bLFunction.annAttachments = applyAll(annotations);
    // Worker return type defaults to nil when no return type descriptor is given.
    Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
        bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
    } else {
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = getPosition(namedWorkerDeclNode);
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }
    addToTop(bLFunction);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = workerBodyPos;
    lambdaExpr.internal = true;
    // `<prefix>w` variable holding the worker lambda.
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
    Location workerNamePos = getPosition(namedWorkerDeclNode.workerName());
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(workerLambdaName, workerNamePos)
            .setExpression(lambdaExpr)
            .isDeclaredWithVar()
            .isFinal()
            .build();
    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        var.addFlag(Flag.TRANSACTIONAL);
    }
    BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    lamdaWrkr.pos = workerBodyPos;
    var.pos = workerBodyPos;
    lamdaWrkr.setVariable(var);
    lamdaWrkr.isWorker = true;
    lamdaWrkr.internal = var.internal = true;
    if (namedWorkerDeclNode.parent().kind() == SyntaxKind.FORK_STATEMENT) {
        lamdaWrkr.isInFork = true;
        lamdaWrkr.var.flagSet.add(Flag.FORKED);
    }
    // Async invocation of the worker lambda (`start` semantics).
    BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
    BLangIdentifier nameInd = this.createIdentifier(workerNamePos, workerLambdaName);
    BLangNameReference reference = new BLangNameReference(workerNamePos, null,
            TreeBuilder.createIdentifierNode(), nameInd);
    bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    bLInvocation.name = (BLangIdentifier) reference.name;
    bLInvocation.pos = workerNamePos;
    bLInvocation.flagSet = new HashSet<>();
    bLInvocation.annAttachments = bLFunction.annAttachments;
    if (bLInvocation.getKind() == NodeKind.INVOCATION) {
        bLInvocation.async = true;
    } else {
        dlog.error(workerBodyPos, DiagnosticErrorCode.START_REQUIRE_INVOCATION);
    }
    BLangSimpleVariable invoc = new SimpleVarBuilder()
            .with(workerName, workerNamePos)
            .isDeclaredWithVar()
            .isWorkerVar()
            .setExpression(bLInvocation)
            .isFinal()
            .setPos(workerNamePos)
            .build();
    BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    workerInvoc.pos = workerNamePos;
    workerInvoc.setVariable(invoc);
    workerInvoc.isWorker = true;
    invoc.flagSet.add(Flag.WORKER);
    // Emitted by the enclosing block transform right after the lambda definition.
    this.additionalStatements.push(workerInvoc);
    return lamdaWrkr;
}

// Applies this transformer to every node in the list and collects the results;
// a null list yields an empty result.
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
    ArrayList<A> annAttachments = new ArrayList<>();
    if (annotations == null) {
        return annAttachments;
    }
    for (B annotation : annotations) {
        A blNode = (A) annotation.apply(this);
        annAttachments.add(blNode);
    }
    return annAttachments;
}

// Transforms an annotation attachment, including its optional mapping-constructor value.
@Override
public BLangNode transform(AnnotationNode annotation) {
    Node name = annotation.annotReference();
    BLangAnnotationAttachment bLAnnotationAttachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    if (annotation.annotValue().isPresent()) {
        MappingConstructorExpressionNode map = annotation.annotValue().get();
        BLangExpression bLExpression = (BLangExpression) map.apply(this);
        bLAnnotationAttachment.setExpression(bLExpression);
    }
    BLangNameReference nameReference = createBLangNameReference(name);
    bLAnnotationAttachment.setAnnotationName(nameReference.name);
    bLAnnotationAttachment.setPackageAlias(nameReference.pkgAlias);
    bLAnnotationAttachment.pos = getPosition(annotation);
    return bLAnnotationAttachment;
}

// Transforms a query action (`from ... do { ... }`); the do-clause position is
// widened to include the `do` keyword (continued below).
@Override
public BLangNode transform(QueryActionNode queryActionNode) {
    BLangQueryAction bLQueryAction = (BLangQueryAction) TreeBuilder.createQueryActionNode();
    BLangDoClause doClause = (BLangDoClause) TreeBuilder.createDoClauseNode();
    doClause.body = (BLangBlockStmt) queryActionNode.blockStatement().apply(this);
    doClause.body.pos = expandLeft(doClause.body.pos, getPosition(queryActionNode.doKeyword()));
    doClause.pos = doClause.body.pos;
    bLQueryAction.queryClauseList.add(queryActionNode.queryPipeline().fromClause().apply(this));
    bLQueryAction.queryClauseList.addAll(applyAll(queryActionNode.queryPipeline().intermediateClauses()));
    bLQueryAction.queryClauseList.add(doClause);
    bLQueryAction.doClause = doClause;
    bLQueryAction.pos = getPosition(queryActionNode);
    return bLQueryAction;
}

// Transforms an annotation declaration, mapping its attach-point identifier tokens
// (e.g. `object function`, `service remote`, `record field`) to AttachPoint values.
@Override
public BLangNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
    BLangAnnotation annotationDecl = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    Location pos = getPositionWithoutMetadata(annotationDeclarationNode);
    annotationDecl.pos = pos;
    annotationDecl.name = createIdentifier(annotationDeclarationNode.annotationTag());
    if (annotationDeclarationNode.visibilityQualifier().isPresent()) {
        annotationDecl.addFlag(Flag.PUBLIC);
    }
    if (annotationDeclarationNode.constKeyword().isPresent()) {
        annotationDecl.addFlag(Flag.CONSTANT);
    }
    annotationDecl.annAttachments = applyAll(getAnnotations(annotationDeclarationNode.metadata()));
    annotationDecl.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(annotationDeclarationNode.metadata()));
    Optional<Node> typedesc = annotationDeclarationNode.typeDescriptor();
    if (typedesc.isPresent()) {
        annotationDecl.typeNode = createTypeNode(typedesc.get());
    }
    SeparatedNodeList<Node> paramList = annotationDeclarationNode.attachPoints();
    for (Node child : paramList) {
        AnnotationAttachPointNode attachPoint = (AnnotationAttachPointNode) child;
        boolean source = attachPoint.sourceKeyword().isPresent();
        AttachPoint bLAttachPoint;
        NodeList<Token> idents = attachPoint.identifiers();
        Token firstIndent = idents.get(0);
        switch (firstIndent.kind()) {
            case OBJECT_KEYWORD:
                // Two-token attach points: `object function` / `object field`.
                Token secondIndent = idents.get(1);
                switch (secondIndent.kind()) {
                    case FUNCTION_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_METHOD.getValue(), source);
                        break;
                    case FIELD_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_FIELD.getValue(), source);
                        break;
                    default:
                        throw new RuntimeException("Syntax kind is not supported: " + secondIndent.kind());
                }
                break;
            case SERVICE_KEYWORD:
                // `service` alone vs `service remote function` (3 identifier tokens).
                String value;
                if (idents.size() == 1) {
                    value = AttachPoint.Point.SERVICE.getValue();
                } else if (idents.size() == 3) {
                    value = AttachPoint.Point.SERVICE_REMOTE.getValue();
                } else {
                    throw new RuntimeException("Invalid annotation attach point");
                }
                bLAttachPoint = AttachPoint.getAttachmentPoint(value, source);
                break;
            case RECORD_KEYWORD:
                bLAttachPoint = AttachPoint.getAttachmentPoint(AttachPoint.Point.RECORD_FIELD.getValue(), source);
                break;
            default:
                bLAttachPoint = AttachPoint.getAttachmentPoint(firstIndent.text(), source);
        }
        annotationDecl.addAttachPoint(bLAttachPoint);
    }
    return annotationDecl;
}

// Transforms an annotation access expression (`expr.@annot`), handling both simple
// and module-qualified annotation tag references.
@Override
public BLangNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {
    BLangAnnotAccessExpr annotAccessExpr = (BLangAnnotAccessExpr) TreeBuilder.createAnnotAccessExpressionNode();
    Node annotTagReference = annotAccessExpressionNode.annotTagReference();
    if (annotAccessExpressionNode.annotTagReference().kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        SimpleNameReferenceNode annotName = (SimpleNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        annotAccessExpr.annotationName = createIdentifier(annotName.name());
    } else {
        QualifiedNameReferenceNode qulifiedName = (QualifiedNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = createIdentifier(qulifiedName.modulePrefix());
        annotAccessExpr.annotationName = createIdentifier(qulifiedName.identifier());
    }
    annotAccessExpr.pos = getPosition(annotAccessExpressionNode);
    annotAccessExpr.expr = createExpression(annotAccessExpressionNode.expression());
    return annotAccessExpr;
}

// Transforms a conditional (ternary) expression.
@Override
public BLangNode transform(ConditionalExpressionNode conditionalExpressionNode) {
    BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
    ternaryExpr.pos = getPosition(conditionalExpressionNode);
    ternaryExpr.elseExpr = createExpression(conditionalExpressionNode.endExpression());
    ternaryExpr.thenExpr = createExpression(conditionalExpressionNode.middleExpression());
    ternaryExpr.expr = createExpression(conditionalExpressionNode.lhsExpression());
    if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
        // A nested ternary parsed into the condition position is rotated so the
        // innermost else-branch becomes this ternary's condition, restoring the
        // intended right-associative evaluation order.
        BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
        BLangTernaryExpr parent = root;
        while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
            parent = (BLangTernaryExpr) parent.elseExpr;
        }
        ternaryExpr.expr = parent.elseExpr;
        parent.elseExpr = ternaryExpr;
        ternaryExpr = root;
    }
    return ternaryExpr;
}

// Transforms `check expr` / `checkpanic expr` into the corresponding BLang node.
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
    Location pos = getPosition(checkExpressionNode);
    BLangExpression expr = createExpression(checkExpressionNode.expression());
    if (checkExpressionNode.checkKeyword().kind() == SyntaxKind.CHECK_KEYWORD) {
        return createCheckExpr(pos, expr);
    }
    return createCheckPanickedExpr(pos, expr);
}

// Transforms a type test expression (`expr is T`).
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
    BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTestExpr.expr = createExpression(typeTestExpressionNode.expression());
    typeTestExpr.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
    typeTestExpr.pos = getPosition(typeTestExpressionNode);
    return typeTestExpr;
}

// Transforms a mapping constructor, handling spread fields, computed-name fields,
// shorthand var-name fields and ordinary key-value fields (continued below).
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
    BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (MappingFieldNode field : mapConstruct.fields()) {
        if (field.kind() == SyntaxKind.SPREAD_FIELD) {
            SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
            BLangRecordSpreadOperatorField bLRecordSpreadOpField =
                    (BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
            bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
            bLRecordSpreadOpField.pos = getPosition(spreadFieldNode);
            bLiteralNode.fields.add(bLRecordSpreadOpField);
        } else if (field.kind() == SyntaxKind.COMPUTED_NAME_FIELD) {
            ComputedNameFieldNode computedNameField = (ComputedNameFieldNode) field;
            BLangRecordKeyValueField bLRecordKeyValueField =
                    (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
            bLRecordKeyValueField.valueExpr = createExpression(computedNameField.valueExpr());
            bLRecordKeyValueField.key =
                    new BLangRecordLiteral.BLangRecordKey(createExpression(computedNameField.fieldNameExpr()));
            bLRecordKeyValueField.key.computedKey = true;
            bLiteralNode.fields.add(bLRecordKeyValueField);
        } else {
            SpecificFieldNode specificField = (SpecificFieldNode) field;
            io.ballerina.compiler.syntax.tree.ExpressionNode valueExpr = specificField.valueExpr().orElse(null);
            if (valueExpr == null) {
                // Shorthand field `{x}` — the field name doubles as a variable reference.
                BLangRecordLiteral.BLangRecordVarNameField fieldVar =
                        (BLangRecordLiteral.BLangRecordVarNameField) TreeBuilder.createRecordVarRefNameFieldNode();
                fieldVar.variableName = createIdentifier((Token) ((SpecificFieldNode) field).fieldName());
                fieldVar.pkgAlias = createIdentifier(null, "");
                fieldVar.pos = fieldVar.variableName.pos;
                fieldVar.readonly = specificField.readonlyKeyword().isPresent();
                bLiteralNode.fields.add(fieldVar);
            } else {
                BLangRecordKeyValueField bLRecordKeyValueField =
                        (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
                bLRecordKeyValueField.pos = getPosition(specificField);
                bLRecordKeyValueField.readonly = specificField.readonlyKeyword().isPresent();
                bLRecordKeyValueField.valueExpr = createExpression(valueExpr);
                bLRecordKeyValueField.key =
                        new BLangRecordLiteral.BLangRecordKey(createExpression(specificField.fieldName()));
                bLRecordKeyValueField.key.computedKey = false;
                bLRecordKeyValueField.key.pos = getPosition(specificField.fieldName());
                bLiteralNode.fields.add(bLRecordKeyValueField);
            }
        }
    }
    bLiteralNode.pos = getPosition(mapConstruct);
    return bLiteralNode;
}

// Transforms a list constructor (`[e1, e2, ...]`) into a BLangListConstructorExpr.
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
    List<BLangExpression> argExprList = new ArrayList<>();
    BLangListConstructorExpr listConstructorExpr =
            (BLangListConstructorExpr) TreeBuilder.createListConstructorExpressionNode();
    for (Node expr : listConstructorExprNode.expressions()) {
        argExprList.add(createExpression(expr));
    }
    listConstructorExpr.exprs = argExprList;
    listConstructorExpr.pos = getPosition(listConstructorExprNode);
    return listConstructorExpr;
}

// Transforms a unary expression; a signed numeric literal (`-5`, `+5`) is folded
// directly into a literal instead of a unary-expression node.
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
    Location pos = getPosition(unaryExprNode);
    SyntaxKind expressionKind = unaryExprNode.expression().kind();
    SyntaxKind unaryOperatorKind = unaryExprNode.unaryOperator().kind();
    if (expressionKind == SyntaxKind.NUMERIC_LITERAL &&
            (unaryOperatorKind == SyntaxKind.MINUS_TOKEN || unaryOperatorKind == SyntaxKind.PLUS_TOKEN)) {
        return createSimpleLiteral(unaryExprNode);
    }
    OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
    BLangExpression expr = createExpression(unaryExprNode.expression());
    return createBLangUnaryExpr(pos, operator, expr);
}

// Transforms a `typeof expr` expression into a unary expression node.
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
    Location pos = getPosition(typeofExpressionNode);
    OperatorKind operator = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
    BLangExpression expr = createExpression(typeofExpressionNode.expression());
    return createBLangUnaryExpr(pos, operator, expr);
}

// Transforms a binary expression; the elvis operator (`?:`) maps to its own node type.
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
    if (binaryExprNode.operator().kind() == SyntaxKind.ELVIS_TOKEN) {
        BLangElvisExpr elvisExpr = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
        elvisExpr.pos = getPosition(binaryExprNode);
        elvisExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
        elvisExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
        return elvisExpr;
    }
    BLangBinaryExpr bLBinaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    bLBinaryExpr.pos = getPosition(binaryExprNode);
    bLBinaryExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
    bLBinaryExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
    bLBinaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
    return bLBinaryExpr;
}

// Transforms a field access (`expr.field` / `expr.ns:field`); braced container
// expressions are unwrapped before the access.
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = fieldAccessExprNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess)
                        TreeBuilder.createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    io.ballerina.compiler.syntax.tree.ExpressionNode containerExpr = fieldAccessExprNode.expression();
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        bLFieldBasedAccess.expr = createExpression(((BracedExpressionNode) containerExpr).expression());
    } else {
        bLFieldBasedAccess.expr = createExpression(containerExpr);
    }
    bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
    bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode.fieldName());
    bLFieldBasedAccess.optionalFieldAccess = false;
    return bLFieldBasedAccess;
}

// Transforms an optional field access (`expr?.field`); mirrors the plain field
// access above but with optionalFieldAccess set.
@Override
public BLangNode transform(OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = optionalFieldAccessExpressionNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) TreeBuilder
                        .createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    bLFieldBasedAccess.pos = getPosition(optionalFieldAccessExpressionNode);
    bLFieldBasedAccess.field.pos = getPosition(optionalFieldAccessExpressionNode.fieldName());
    bLFieldBasedAccess.expr = createExpression(optionalFieldAccessExpressionNode.expression());
    bLFieldBasedAccess.optionalFieldAccess = true;
    return bLFieldBasedAccess;
}

// A braced expression contributes only its inner expression.
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
    return createExpression(brcExprOut.expression());
}

// Transforms a function call; calls under a `start` action become async invocations.
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
    return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
            getPosition(functionCallNode), isFunctionCallAsync(functionCallNode));
}

// Transforms an error constructor (`error(...)` / `error T(...)`), splitting
// positional and named arguments.
@Override
public BLangNode transform(ErrorConstructorExpressionNode errorConstructorExprNode) {
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    errorConstructorExpr.pos = getPosition(errorConstructorExprNode);
    if (errorConstructorExprNode.typeReference().isPresent()) {
        errorConstructorExpr.errorTypeRef =
                (BLangUserDefinedType) createTypeNode(errorConstructorExprNode.typeReference().get());
    }
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (Node argNode : errorConstructorExprNode.arguments()) {
        if (argNode.kind() == SyntaxKind.POSITIONAL_ARG) {
            positionalArgs.add((BLangExpression) transform((PositionalArgumentNode) argNode));
        } else if (argNode.kind() == SyntaxKind.NAMED_ARG) {
            namedArgs.add((BLangNamedArgsExpression) transform((NamedArgumentNode) argNode));
        }
    }
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;
    return errorConstructorExpr;
}

// Transforms a method call (`expr.method(args)`) into an invocation with an
// attached receiver expression.
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
    BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
            methodCallExprNode.arguments(), getPosition(methodCallExprNode), false);
    bLInvocation.expr = createExpression(methodCallExprNode.expression());
    return bLInvocation;
}

// Transforms an implicit `new(...)` expression.
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
    BLangTypeInit initNode = createTypeInit(implicitNewExprNode);
    BLangInvocation invocationNode = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    return initNode;
}

// Transforms an explicit `new T(...)` expression.
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    BLangTypeInit initNode = createTypeInit(explicitNewExprNode);
    BLangInvocation invocationNode = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    return initNode;
}

// A function call is async when it is the operand of a `start` action.
private boolean isFunctionCallAsync(FunctionCallExpressionNode functionCallExpressionNode) {
    return functionCallExpressionNode.parent().kind() == SyntaxKind.START_ACTION;
}

// Creates the BLangTypeInit node; only explicit `new T(...)` carries a type descriptor.
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = getPosition(expression);
    if (expression.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        Node type = ((ExplicitNewExpressionNode) expression).typeDescriptor();
        initNode.userDefinedType = createTypeNode(type);
    }
    return initNode;
}

// Creates the init invocation for a `new` expression, named after the `new` keyword.
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = getPosition(expression);
    populateArgsInvocation(expression, invocationNode);
    BLangNameReference nameReference = createBLangNameReference(newKeyword);
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    return invocationNode;
}

// Copies the `new` expression's argument expressions into the invocation node.
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    Iterator<FunctionArgumentNode> argumentsIter = getArgumentNodesIterator(expression);
    if (argumentsIter != null) {
        while (argumentsIter.hasNext()) {
            BLangExpression argument = createExpression(argumentsIter.next());
            invocationNode.argExprs.add(argument);
        }
    }
}

// Returns the argument iterator for a `new` expression, or null when an implicit
// `new` has no parenthesized argument list.
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    Iterator<FunctionArgumentNode> argumentsIter = null;
    if (expression.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        Optional<ParenthesizedArgList> argsList = ((ImplicitNewExpressionNode) expression).parenthesizedArgList();
        if (argsList.isPresent()) {
            ParenthesizedArgList argList = argsList.get();
            argumentsIter = argList.arguments().iterator();
        }
    } else {
        ParenthesizedArgList argList =
                (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
        argumentsIter = argList.arguments().iterator();
    }
    return argumentsIter;
}

// Transforms an indexed (member access) expression (continued below).
@Override
public BLangNode transform(IndexedExpressionNode indexedExpressionNode) {
    BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess)
            TreeBuilder.createIndexBasedAccessNode();
    indexBasedAccess.pos = getPosition(indexedExpressionNode);
    SeparatedNodeList<io.ballerina.compiler.syntax.tree.ExpressionNode> keys =
            indexedExpressionNode.keyExpression();
    if (keys.size() == 1) {
        indexBasedAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression().get(0));
    } else {
        // Multiple keys: wrap them in a table multi-key expression spanning
        // from the first to the last key.
        BLangTableMultiKeyExpr multiKeyExpr =
                (BLangTableMultiKeyExpr) TreeBuilder.createTableMultiKeyExpressionNode();
        multiKeyExpr.pos = getPosition(keys.get(0), keys.get(keys.size() - 1));
        List<BLangExpression> multiKeyIndexExprs = new ArrayList<>();
        for (io.ballerina.compiler.syntax.tree.ExpressionNode keyExpr : keys) {
            multiKeyIndexExprs.add(createExpression(keyExpr));
        }
        multiKeyExpr.multiKeyIndexExprs = multiKeyIndexExprs;
        indexBasedAccess.indexExpr = multiKeyExpr;
    }
    Node containerExpr = indexedExpressionNode.containerExpression();
    BLangExpression expression = createExpression(containerExpr);
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // `(expr)[i]`: keep the grouping by re-wrapping the access in a group expr.
        indexBasedAccess.expr = ((BLangGroupExpr) expression).expression;
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = indexBasedAccess;
        group.pos = getPosition(indexedExpressionNode);
        return group;
    } else if (containerExpr.kind() == SyntaxKind.XML_STEP_EXPRESSION) {
        // XML step access: the index becomes the child index of the navigation node.
        ((BLangXMLNavigationAccess) expression).childIndex = indexBasedAccess.indexExpr;
        return expression;
    }
    indexBasedAccess.expr = expression;
    return indexBasedAccess;
}

// Type cast `<T> expr`; the cast-param annotations are attached to the conversion.
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    BLangTypeConversionExpr typeConversionNode =
            (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = getPosition(typeCastExpressionNode);
    TypeCastParamNode typeCastParamNode = typeCastExpressionNode.typeCastParam();
    if (typeCastParamNode != null && typeCastParamNode.type().isPresent()) {
        typeConversionNode.typeNode = createTypeNode(typeCastParamNode.type().get());
    }
    typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
    // NOTE(review): annotations() is called unconditionally although the check
    // above allows typeCastParamNode to be null — confirm the parser always
    // supplies a cast param, otherwise this can NPE.
    typeConversionNode.annAttachments = applyAll(typeCastParamNode.annotations());
    return typeConversionNode;
}

// Bare tokens reachable by the transformer; only literal-bearing kinds are supported.
@Override
public BLangNode transform(Token token) {
    SyntaxKind kind = token.kind();
    switch (kind) {
        case XML_TEXT_CONTENT:
        case TEMPLATE_STRING:
        case CLOSE_BRACE_TOKEN:
            return createSimpleLiteral(token);
        default:
            throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
}

// `${expr}` inside a template unwraps to the inner expression.
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    return createExpression(interpolationNode.expression());
}

// Template expressions: xml `...`, string `...`, and raw `...` templates.
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    SyntaxKind kind = expressionNode.kind();
    switch (kind) {
        case XML_TEMPLATE_EXPRESSION:
            BLangNode xmlTemplateLiteral = createXmlTemplateLiteral(expressionNode);
            xmlTemplateLiteral.pos = getPosition(expressionNode);
            return xmlTemplateLiteral;
        case STRING_TEMPLATE_EXPRESSION:
            return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
        case RAW_TEMPLATE_EXPRESSION:
            return createRawTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
        default:
            throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
}

// `table [...]` constructor: rows are record literals; the key specifier is optional.
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    for (Node row : tableConstructorExpressionNode.rows()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) row.apply(this));
    }
    if (tableConstructorExpressionNode.keySpecifier().isPresent()) {
        // orElse(null) is safe here: presence was just checked.
        tableConstructorExpr.tableKeySpecifier =
                (BLangTableKeySpecifier) tableConstructorExpressionNode.keySpecifier().orElse(null).apply(this);
    }
    return tableConstructorExpr;
}

// `trap expr`: wraps the expression so panics convert to error values.
@Override
public BLangNode transform(TrapExpressionNode trapExpressionNode) {
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.expr = createExpression(trapExpressionNode.expression());
    trapExpr.pos = getPosition(trapExpressionNode);
    return trapExpr;
}

// Worker receive `<- w`; multiple-receive is not yet supported and is reported,
// with a missing token substituted so transformation can continue.
@Override
public BLangNode transform(ReceiveActionNode receiveActionNode) {
    BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    Node receiveWorkers = receiveActionNode.receiveWorkers();
    Token workerName;
    if (receiveWorkers.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        workerName = ((SimpleNameReferenceNode) receiveWorkers).name();
    } else {
        Location receiveFieldsPos = getPosition(receiveWorkers);
        dlog.error(receiveFieldsPos, DiagnosticErrorCode.MULTIPLE_RECEIVE_ACTION_NOT_YET_SUPPORTED);
        workerName = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
    }
    workerReceiveExpr.setWorkerName(createIdentifier(workerName));
    workerReceiveExpr.pos = getPosition(receiveActionNode);
    return workerReceiveExpr;
}

// Synchronous worker send `expr ->> w`.
@Override
public BLangNode transform(SyncSendActionNode syncSendActionNode) {
    BLangWorkerSyncSendExpr workerSendExpr = TreeBuilder.createWorkerSendSyncExprNode();
    workerSendExpr.setWorkerName(createIdentifier(
            syncSendActionNode.peerWorker().name()));
    workerSendExpr.expr = createExpression(syncSendActionNode.expression());
    workerSendExpr.pos = getPosition(syncSendActionNode);
    return workerSendExpr;
}

// Implicit anonymous function `x => expr` / `(a, b) => expr` becomes an arrow
// function with a synthesized anonymous name. (Condition continues on next line.)
@Override
public BLangNode transform(ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
    BLangArrowFunction arrowFunction = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunction.pos = getPosition(implicitAnonymousFunctionExpressionNode);
    arrowFunction.functionName = createIdentifier(arrowFunction.pos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    Node param = implicitAnonymousFunctionExpressionNode.params();
    if (param.kind() ==
            SyntaxKind.INFER_PARAM_LIST) {
        // Parenthesized inferred parameter list: `(a, b) => ...`. Each name
        // reference transforms to a user-defined type whose name is reused as
        // the parameter name.
        ImplicitAnonymousFunctionParameters paramsNode = (ImplicitAnonymousFunctionParameters) param;
        SeparatedNodeList<SimpleNameReferenceNode> paramList = paramsNode.parameters();
        for (SimpleNameReferenceNode child : paramList) {
            BLangUserDefinedType userDefinedType = (BLangUserDefinedType) child.apply(this);
            BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
            parameter.name = userDefinedType.typeName;
            parameter.pos = getPosition(child);
            arrowFunction.params.add(parameter);
        }
    } else {
        // Single unparenthesized parameter: `x => ...`.
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) param.apply(this);
        BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        parameter.name = userDefinedType.typeName;
        parameter.pos = getPosition(param);
        arrowFunction.params.add(parameter);
    }
    // The arrow body is always a single expression.
    arrowFunction.body = new BLangExprFunctionBody();
    arrowFunction.body.expr = createExpression(implicitAnonymousFunctionExpressionNode.expression());
    arrowFunction.body.pos = arrowFunction.body.expr.pos;
    return arrowFunction;
}

// `commit` action.
@Override
public BLangNode transform(CommitActionNode commitActionNode) {
    BLangCommitExpr commitExpr = TreeBuilder.createCommitExpressionNode();
    commitExpr.pos = getPosition(commitActionNode);
    return commitExpr;
}

// `flush` / `flush w` action; the peer worker is optional.
@Override
public BLangNode transform(FlushActionNode flushActionNode) {
    BLangWorkerFlushExpr workerFlushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    Node optionalPeerWorker = flushActionNode.peerWorker().orElse(null);
    if (optionalPeerWorker != null) {
        SimpleNameReferenceNode peerWorker = (SimpleNameReferenceNode) optionalPeerWorker;
        workerFlushExpr.workerIdentifier = createIdentifier(peerWorker.name());
    }
    workerFlushExpr.pos = getPosition(flushActionNode);
    return workerFlushExpr;
}

// `let x = e1 in e2` expression.
@Override
public BLangNode transform(LetExpressionNode letExpressionNode) {
    BLangLetExpression letExpr = (BLangLetExpression) TreeBuilder.createLetExpressionNode();
    letExpr.pos = getPosition(letExpressionNode);
    letExpr.expr = createExpression(letExpressionNode.expression());
    List<BLangLetVariable> letVars = new ArrayList<>();
    for (LetVariableDeclarationNode letVarDecl : letExpressionNode.letVarDeclarations()) {
        letVars.add(createLetVariable(letVarDecl));
    }
    letExpr.letVarDeclarations = letVars;
    return letExpr;
}

// Builds one let-variable declaration; let variables are implicitly final and
// carry over any annotations on the declaration.
public BLangLetVariable createLetVariable(LetVariableDeclarationNode letVarDecl) {
    BLangLetVariable letVar = TreeBuilder.createLetVariableNode();
    VariableDefinitionNode varDefNode = createBLangVarDef(getPosition(letVarDecl),
            letVarDecl.typedBindingPattern(), Optional.of(letVarDecl.expression()), Optional.empty());
    varDefNode.getVariable().addFlag(Flag.FINAL);
    List<BLangNode> annots = applyAll(letVarDecl.annotations());
    for (BLangNode node : annots) {
        varDefNode.getVariable().addAnnotationAttachment((AnnotationAttachmentNode) node);
    }
    letVar.definitionNode = varDefNode;
    return letVar;
}

// Mapping binding pattern `{a, b, ...rest}` used as a variable reference
// (destructuring assignment LHS).
@Override
public BLangNode transform(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVarRef recordVarRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordVarRef.pos = getPosition(mappingBindingPatternNode);
    List<BLangRecordVarRefKeyValue> expressions = new ArrayList<>();
    for (BindingPatternNode expr : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (expr.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            recordVarRef.restParam = createExpression(expr);
        } else {
            expressions.add(createRecordVarKeyValue(expr));
        }
    }
    recordVarRef.recordRefFields = expressions;
    return recordVarRef;
}

// Builds a key/value pair for a record var-ref; handles both the full form
// `{name: pattern}` and the shorthand `{name}` form.
private BLangRecordVarRefKeyValue createRecordVarKeyValue(BindingPatternNode expr) {
    BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
    if (expr instanceof FieldBindingPatternFullNode) {
        FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) expr;
        keyValue.variableName = createIdentifier(fullNode.variableName().name());
        keyValue.variableReference = createExpression(fullNode.bindingPattern());
    } else {
        // Shorthand: the field name itself is the referenced variable.
        FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) expr;
        keyValue.variableName = createIdentifier(varnameNode.variableName().name());
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        varRef.pos = getPosition(varnameNode.variableName());
        varRef.variableName = createIdentifier(varnameNode.variableName().name());
        varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        keyValue.variableReference = varRef;
    }
    return keyValue;
}

// List binding pattern `[a, b, ...rest]` used as a variable reference.
@Override
public BLangNode transform(ListBindingPatternNode listBindingPatternNode) {
    BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    List<BLangExpression> expressions = new ArrayList<>();
    for (BindingPatternNode expr : listBindingPatternNode.bindingPatterns()) {
        if (expr.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            tupleVarRef.restParam = createExpression(expr);
        } else {
            expressions.add(createExpression(expr));
        }
    }
    tupleVarRef.expressions = expressions;
    tupleVarRef.pos = getPosition(listBindingPatternNode);
    return tupleVarRef;
}

// `...rest` binding pattern unwraps to a reference to the rest variable.
@Override
public BLangNode transform(RestBindingPatternNode restBindingPatternNode) {
    return createExpression(restBindingPatternNode.variableName());
}

// Capture binding pattern `x` unwraps to a reference to the captured variable.
@Override
public BLangNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    return createExpression(captureBindingPatternNode.variableName());
}

// Wildcard binding pattern `_` becomes a reference to the special ignore name.
@Override
public BLangNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
    BLangSimpleVarRef ignoreVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    BLangIdentifier ignore = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    ignore.value = Names.IGNORE.value;
    ignoreVarRef.variableName = ignore;
    ignore.pos = getPosition(wildcardBindingPatternNode);
    return ignoreVarRef;
}

// Error binding pattern `error T (msg, cause, ...detail)` used as a variable
// reference. (Body continues on the next source line.)
@Override
public BLangNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
    BLangErrorVarRef errorVarRef = (BLangErrorVarRef) TreeBuilder.createErrorVariableReferenceNode();
    errorVarRef.pos = getPosition(errorBindingPatternNode);
    Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
    if (errorTypeRef.isPresent()) {
        errorVarRef.typeNode = createTypeNode(errorTypeRef.get());
    }
    SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
            errorBindingPatternNode.argListBindingPatterns();
    int numberOfArgs = argListBindingPatterns.size();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    // Positional layout: arg 0 is the message; a capture/wildcard after position 0
    // falls through to be treated as the cause (note the intentional-looking
    // missing break below — NOTE(review): confirm this fall-through is deliberate).
    for (int position = 0; position < numberOfArgs; position++) {
        BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
        switch (bindingPatternNode.kind()) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                if (position == 0) {
                    errorVarRef.message = (BLangVariableReference) createExpression(bindingPatternNode);
                    break;
                }
                // falls through when position > 0
            case ERROR_BINDING_PATTERN:
                errorVarRef.cause = (BLangVariableReference) createExpression(bindingPatternNode);
                break;
            case NAMED_ARG_BINDING_PATTERN:
                namedArgs.add((BLangNamedArgsExpression) bindingPatternNode.apply(this));
                break;
            default:
                // Anything else (e.g. rest binding) becomes the rest variable.
                errorVarRef.restVar = (BLangVariableReference) createExpression(bindingPatternNode);
        }
    }
    errorVarRef.detail = namedArgs;
    return errorVarRef;
}

// Named argument inside an error binding pattern: `name = pattern`.
@Override
public BLangNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
    BLangNamedArgsExpression namedArgsExpression = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArgsExpression.pos = getPosition(namedArgBindingPatternNode);
    namedArgsExpression.name = createIdentifier(namedArgBindingPatternNode.argName());
    namedArgsExpression.expr = createExpression(namedArgBindingPatternNode.bindingPattern());
    return namedArgsExpression;
}

// `return` / `return expr`; a bare return synthesizes a nil literal.
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    BLangReturn bLReturn = (BLangReturn) TreeBuilder.createReturnNode();
    bLReturn.pos = getPosition(returnStmtNode);
    if (returnStmtNode.expression().isPresent()) {
        bLReturn.expr = createExpression(returnStmtNode.expression().get());
    } else {
        BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        nilLiteral.pos = getPosition(returnStmtNode);
        nilLiteral.value = Names.NIL_VALUE;
        nilLiteral.setBType(symTable.nilType);
        bLReturn.expr = nilLiteral;
    }
    return bLReturn;
}

// `panic expr` statement.
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    BLangPanic bLPanic = (BLangPanic) TreeBuilder.createPanicNode();
    bLPanic.pos = getPosition(panicStmtNode);
    bLPanic.expr = createExpression(panicStmtNode.expression());
    return bLPanic;
}

// `continue` statement.
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    BLangContinue bLContinue = (BLangContinue) TreeBuilder.createContinueNode();
    bLContinue.pos = getPosition(continueStmtNode);
    return bLContinue;
}

// Module-level `listener` declaration; built via SimpleVarBuilder and flagged
// as a listener variable.
@Override
public BLangNode transform(ListenerDeclarationNode listenerDeclarationNode) {
    Token visibilityQualifier = null;
    if (listenerDeclarationNode.visibilityQualifier().isPresent()) {
        visibilityQualifier = listenerDeclarationNode.visibilityQualifier().get();
    }
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(listenerDeclarationNode.variableName())
            .setTypeByNode(listenerDeclarationNode.typeDescriptor().orElse(null))
            .setExpressionByNode(listenerDeclarationNode.initializer())
            .setVisibility(visibilityQualifier)
            .isListenerVar()
            .build();
    // Position excludes metadata (annotations/doc) so diagnostics point at the decl.
    var.pos = getPositionWithoutMetadata(listenerDeclarationNode);
    var.name.pos = getPosition(listenerDeclarationNode.variableName());
    var.annAttachments = applyAll(getAnnotations(listenerDeclarationNode.metadata()));
    return var;
}

// `break` statement.
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    BLangBreak bLBreak = (BLangBreak) TreeBuilder.createBreakNode();
    bLBreak.pos = getPosition(breakStmtNode);
    return bLBreak;
}

// Assignment statement; binding-pattern LHS dispatches to the matching
// destructure statement builder, otherwise a plain assignment is built and the
// LHS is validated as an l-value.
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    SyntaxKind lhsKind = assignmentStmtNode.varRef().kind();
    switch (lhsKind) {
        case LIST_BINDING_PATTERN:
            return createTupleDestructureStatement(assignmentStmtNode);
        case MAPPING_BINDING_PATTERN:
            return createRecordDestructureStatement(assignmentStmtNode);
        case ERROR_BINDING_PATTERN:
            return createErrorDestructureStatement(assignmentStmtNode);
        default:
            break;
    }
    BLangAssignment bLAssignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression lhsExpr = createExpression(assignmentStmtNode.varRef());
    validateLvexpr(lhsExpr, DiagnosticErrorCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    bLAssignment.setExpression(createExpression(assignmentStmtNode.expression()));
    bLAssignment.pos = getPosition(assignmentStmtNode);
    bLAssignment.varRef = lhsExpr;
    return bLAssignment;
}

// `[a, b] = expr` tuple destructuring assignment.
public BLangNode createTupleDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangTupleDestructure tupleDestructure =
            (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    tupleDestructure.varRef = (BLangTupleVarRef) createExpression(assignmentStmtNode.varRef());
    tupleDestructure.setExpression(createExpression(assignmentStmtNode.expression()));
    tupleDestructure.pos = getPosition(assignmentStmtNode);
    return tupleDestructure;
}

// `{a, b} = expr` record destructuring assignment.
public BLangNode createRecordDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangRecordDestructure recordDestructure =
            (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    recordDestructure.varRef = (BLangRecordVarRef) createExpression(assignmentStmtNode.varRef());
    recordDestructure.setExpression(createExpression(assignmentStmtNode.expression()));
    recordDestructure.pos = getPosition(assignmentStmtNode);
    return recordDestructure;
}

// `error(...) = expr` error destructuring assignment.
// (The return value's identifier continues on the next source line.)
public BLangNode createErrorDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangErrorDestructure errorDestructure =
            (BLangErrorDestructure) TreeBuilder.createErrorDestructureStatementNode();
    errorDestructure.varRef = (BLangErrorVarRef) createExpression(assignmentStmtNode.varRef());
    errorDestructure.setExpression(createExpression(assignmentStmtNode.expression()));
    errorDestructure.pos = getPosition(assignmentStmtNode);
    return
            errorDestructure;
}

// Compound assignment `lhs op= rhs`; the operator kind is parsed from the
// binary operator token text.
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    BLangCompoundAssignment bLCompAssignment = (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    bLCompAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    bLCompAssignment
            .setVariable((VariableReferenceNode) createExpression(compoundAssignmentStmtNode.lhsExpression()));
    bLCompAssignment.pos = getPosition(compoundAssignmentStmtNode);
    bLCompAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return bLCompAssignment;
}

// Rejects invocations used as assignment targets and recurses through
// field/index access chains to find a nested invocation.
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    if (lExprNode.getKind() == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    }
    if (lExprNode.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            || lExprNode.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}

// `do { ... } [on fail ...]` statement.
@Override
public BLangNode transform(DoStatementNode doStatementNode) {
    BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
    bLDo.pos = getPosition(doStatementNode);
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) doStatementNode.blockStatement().apply(this);
    bLBlockStmt.pos = getPosition(doStatementNode.blockStatement());
    bLDo.setBody(bLBlockStmt);
    doStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        bLDo.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return bLDo;
}

// `fail expr` statement.
@Override
public BLangNode transform(FailStatementNode failStatementNode) {
    BLangFail bLFail = (BLangFail) TreeBuilder.createFailNode();
    bLFail.pos = getPosition(failStatementNode);
    bLFail.expr = createExpression(failStatementNode.expression());
    return bLFail;
}

// `while cond { ... } [on fail ...]` statement.
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    BLangWhile bLWhile = (BLangWhile) TreeBuilder.createWhileNode();
    bLWhile.setCondition(createExpression(whileStmtNode.condition()));
    bLWhile.pos = getPosition(whileStmtNode);
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    bLBlockStmt.pos = getPosition(whileStmtNode.whileBody());
    bLWhile.setBody(bLBlockStmt);
    whileStmtNode.onFailClause().ifPresent(onFailClauseNode -> {
        bLWhile.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return bLWhile;
}

// `if cond { ... } [else ...]` statement; the else branch may itself be an
// if-else or a block.
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    BLangIf bLIf = (BLangIf) TreeBuilder.createIfElseStatementNode();
    bLIf.pos = getPosition(ifElseStmtNode);
    bLIf.setCondition(createExpression(ifElseStmtNode.condition()));
    bLIf.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    ifElseStmtNode.elseBody().ifPresent(elseBody -> {
        ElseBlockNode elseNode = (ElseBlockNode) elseBody;
        bLIf.setElseStatement(
                (org.ballerinalang.model.tree.statements.StatementNode) elseNode.elseBody().apply(this));
    });
    return bLIf;
}

// Block statement `{ ... }`; statements are generated in "local context" mode.
// For if/else blocks the position is widened to the parent construct.
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    this.isInLocalContext = true;
    bLBlockStmt.stmts = generateBLangStatements(blockStatement.statements());
    this.isInLocalContext = false;
    bLBlockStmt.pos = getPosition(blockStatement);
    SyntaxKind parent = blockStatement.parent().kind();
    if (parent == SyntaxKind.IF_ELSE_STATEMENT || parent == SyntaxKind.ELSE_BLOCK) {
        bLBlockStmt.pos = expandLeft(bLBlockStmt.pos, getPosition(blockStatement.parent()));
    }
    return bLBlockStmt;
}

// `rollback [expr]` statement.
@Override
public BLangNode transform(RollbackStatementNode rollbackStatementNode) {
    BLangRollback rollbackStmt = (BLangRollback) TreeBuilder.createRollbackNode();
    rollbackStmt.pos = getPosition(rollbackStatementNode);
    if (rollbackStatementNode.expression().isPresent()) {
        rollbackStmt.expr = createExpression(rollbackStatementNode.expression().get());
    }
    return rollbackStmt;
}

// `lock { ... } [on fail ...]` statement. (Signature continues on next line.)
@Override
public BLangNode
                    createExpression(initializer.get()) : null;
            variable.setInitialExpression(expr);
            bLVarDef.setVariable(variable);
            if (finalKeyword.isPresent()) {
                variable.flagSet.add(Flag.FINAL);
            }
            TypeDescriptorNode typeDesc = typedBindingPattern.typeDescriptor();
            variable.isDeclaredWithVar = isDeclaredWithVar(typeDesc);
            if (!variable.isDeclaredWithVar) {
                variable.setTypeNode(createTypeNode(typeDesc));
            }
            return bLVarDef;
        case MAPPING_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createRecordVariableDef(variable);
        case LIST_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createTupleVariableDef(variable);
        case ERROR_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createErrorVariableDef(variable);
        default:
            throw new RuntimeException(
                    "Syntax kind is not a valid binding pattern " + typedBindingPattern.bindingPattern().kind());
    }
}

// Applies qualifier flags (final/configurable/isolated), resolves the declared
// type (or marks `var`), and sets the initializer if present.
private void initializeBLangVariable(BLangVariable var, TypeDescriptorNode type,
                                     Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
                                     NodeList<Token> qualifiers) {
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            markVariableWithFlag(var, Flag.FINAL);
        } else if (qualifier.kind() == SyntaxKind.CONFIGURABLE_KEYWORD) {
            // Configurable implies final. A `?` (required) initializer is dropped
            // and the variable flagged REQUIRED instead.
            var.flagSet.add(Flag.CONFIGURABLE);
            var.flagSet.add(Flag.FINAL);
            // NOTE(review): initializer.get() is not guarded by isPresent() here —
            // confirm configurable declarations always carry an initializer.
            if (initializer.get().kind() == SyntaxKind.REQUIRED_EXPRESSION) {
                var.flagSet.add(Flag.REQUIRED);
                initializer = Optional.empty();
            }
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            var.flagSet.add(Flag.ISOLATED);
        }
    }
    var.isDeclaredWithVar = isDeclaredWithVar(type);
    if (!var.isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(type));
    }
    if (initializer.isPresent()) {
        var.setInitialExpression(createExpression(initializer.get()));
    }
}

// Wraps a record variable in its definition node.
private BLangRecordVariableDef createRecordVariableDef(BLangVariable var) {
    BLangRecordVariableDef varDefNode = (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
    varDefNode.pos = var.pos;
    varDefNode.setVariable(var);
    return varDefNode;
}

// Wraps a tuple variable in its definition node.
private BLangTupleVariableDef createTupleVariableDef(BLangVariable tupleVar) {
    BLangTupleVariableDef varDefNode = (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
    varDefNode.pos = tupleVar.pos;
    varDefNode.setVariable(tupleVar);
    return varDefNode;
}

// Wraps an error variable in its definition node.
private BLangErrorVariableDef createErrorVariableDef(BLangVariable errorVar) {
    BLangErrorVariableDef varDefNode = (BLangErrorVariableDef) TreeBuilder.createErrorVariableDefinitionNode();
    varDefNode.pos = errorVar.pos;
    varDefNode.setVariable(errorVar);
    return varDefNode;
}

// Expression statement; async sends transform directly (they are statements,
// not expressions), everything else is wrapped in an expression-statement node.
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    SyntaxKind kind = expressionStatement.expression().kind();
    switch (kind) {
        case ASYNC_SEND_ACTION:
            return expressionStatement.expression().apply(this);
        default:
            BLangExpressionStmt bLExpressionStmt =
                    (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
            bLExpressionStmt.expr = createExpression(expressionStatement.expression());
            bLExpressionStmt.pos = getPosition(expressionStatement);
            return bLExpressionStmt;
    }
}

// Asynchronous worker send `expr -> w`.
@Override
public BLangNode transform(AsyncSendActionNode asyncSendActionNode) {
    BLangWorkerSend workerSendNode = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    workerSendNode.setWorkerName(createIdentifier(getPosition(asyncSendActionNode.peerWorker()),
            asyncSendActionNode.peerWorker().name()));
    workerSendNode.expr = createExpression(asyncSendActionNode.expression());
    workerSendNode.pos = getPosition(asyncSendActionNode);
    return workerSendNode;
}

// `wait` action: a fields list becomes wait-for-all, otherwise a single-future wait.
@Override
public BLangNode transform(WaitActionNode waitActionNode) {
    Node waitFutureExpr = waitActionNode.waitFutureExpr();
    if (waitFutureExpr.kind() == SyntaxKind.WAIT_FIELDS_LIST) {
        return getWaitForAllExpr((WaitFieldsListNode) waitFutureExpr);
    }
    BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
    waitExpr.pos = getPosition(waitActionNode);
    waitExpr.exprList = Collections.singletonList(createExpression(waitFutureExpr));
    return waitExpr;
}

// Builds the wait-for-all expression from the `{...}` wait fields list.
private BLangWaitForAllExpr getWaitForAllExpr(WaitFieldsListNode waitFields) {
    BLangWaitForAllExpr bLangWaitForAll = TreeBuilder.createWaitForAllExpressionNode();
    List<BLangWaitKeyValue> exprs = new ArrayList<>();
    for (Node waitField : waitFields.waitFields()) {
        exprs.add(getWaitForAllExpr(waitField));
    }
    bLangWaitForAll.keyValuePairs = exprs;
    bLangWaitForAll.pos = getPosition(waitFields);
    return bLangWaitForAll;
}

// Builds one key/value of a wait-for-all: either `key: futureExpr` or the
// shorthand `name` (where the name is both key and future reference).
private BLangWaitKeyValue getWaitForAllExpr(Node waitFields) {
    BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
    keyValue.pos = getPosition(waitFields);
    if (waitFields.kind() == SyntaxKind.WAIT_FIELD) {
        WaitFieldNode waitFieldNode = (WaitFieldNode) waitFields;
        BLangIdentifier key = createIdentifier(waitFieldNode.fieldName().name());
        key.setLiteral(false);
        keyValue.key = key;
        keyValue.valueExpr = createExpression(waitFieldNode.waitFutureExpr());
        return keyValue;
    }
    SimpleNameReferenceNode varName = (SimpleNameReferenceNode) waitFields;
    BLangIdentifier key = createIdentifier(varName.name());
    key.setLiteral(false);
    keyValue.key = key;
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(varName);
    varRef.variableName = key;
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    keyValue.keyExpr = varRef;
    return keyValue;
}

// `start expr` action; unwraps a worker-send wrapper if present, then (condition
// continues on the next source line) upgrades an invocation to an action invocation.
@Override
public BLangNode transform(StartActionNode startActionNode) {
    BLangNode expression = createActionOrExpression(startActionNode.expression());
    BLangInvocation invocation;
    if (!(expression instanceof BLangWorkerSend)) {
        invocation = (BLangInvocation) expression;
    } else {
        invocation = (BLangInvocation) ((BLangWorkerSend) expression).expr;
        expression = ((BLangWorkerSend) expression).expr;
    }
    if
            (expression.getKind() == NodeKind.INVOCATION) {
        // Re-wrap the plain invocation as an action invocation, copying all fields.
        BLangActionInvocation actionInvocation = (BLangActionInvocation) TreeBuilder.createActionInvocation();
        actionInvocation.expr = invocation.expr;
        actionInvocation.pkgAlias = invocation.pkgAlias;
        actionInvocation.name = invocation.name;
        actionInvocation.argExprs = invocation.argExprs;
        actionInvocation.flagSet = invocation.flagSet;
        actionInvocation.pos = getPosition(startActionNode);
        invocation = actionInvocation;
    }
    // `start` makes the invocation asynchronous and carries its annotations.
    invocation.async = true;
    invocation.annAttachments = applyAll(startActionNode.annotations());
    return invocation;
}

// `transaction { ... } [on fail ...]` statement.
@Override
public BLangNode transform(TransactionStatementNode transactionStatementNode) {
    BLangTransaction transaction = (BLangTransaction) TreeBuilder.createTransactionNode();
    BLangBlockStmt transactionBlock = (BLangBlockStmt) transactionStatementNode.blockStatement().apply(this);
    transactionBlock.pos = getPosition(transactionStatementNode.blockStatement());
    transaction.setTransactionBody(transactionBlock);
    transaction.pos = getPosition(transactionStatementNode);
    transactionStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        transaction.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return transaction;
}

// Positional argument unwraps to its expression.
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
    return createExpression(argumentNode.expression());
}

// Named argument `name = expr`.
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgumentNode);
    namedArg.name = this.createIdentifier(namedArgumentNode.argumentName().name());
    namedArg.expr = createExpression(namedArgumentNode.expression());
    return namedArg;
}

// Rest argument `...expr`; its position is anchored at the ellipsis token.
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
    BLangRestArgsExpression varArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    varArgs.pos = getPosition(restArgumentNode.ellipsis());
    varArgs.expr = createExpression(restArgumentNode.expression());
    return varArgs;
}

// Required parameter `T name`.
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    BLangSimpleVariable simpleVar = createSimpleVar(requiredParameter.paramName(),
            requiredParameter.typeName(), requiredParameter.annotations());
    simpleVar.pos = getPosition(requiredParameter);
    if (requiredParameter.paramName().isPresent()) {
        simpleVar.name.pos = getPosition(requiredParameter.paramName().get());
    }
    simpleVar.flagSet.add(Flag.REQUIRED_PARAM);
    return simpleVar;
}

// Included-record parameter `*T name`; position is trimmed to start at the type.
@Override
public BLangNode transform(IncludedRecordParameterNode includedRecordParameterNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(includedRecordParameterNode.paramName(),
            includedRecordParameterNode.typeName(), includedRecordParameterNode.annotations());
    simpleVar.flagSet.add(INCLUDED);
    simpleVar.pos = getPosition(includedRecordParameterNode);
    if (includedRecordParameterNode.paramName().isPresent()) {
        simpleVar.name.pos = getPosition(includedRecordParameterNode.paramName().get());
    }
    simpleVar.pos = trimLeft(simpleVar.pos, getPosition(includedRecordParameterNode.typeName()));
    return simpleVar;
}

// Defaultable parameter `T name = expr`.
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    BLangSimpleVariable simpleVar = createSimpleVar(defaultableParameter.paramName(),
            defaultableParameter.typeName(), defaultableParameter.annotations());
    simpleVar.setInitialExpression(createExpression(defaultableParameter.expression()));
    simpleVar.flagSet.add(Flag.DEFAULTABLE_PARAM);
    simpleVar.pos = getPosition(defaultableParameter);
    return simpleVar;
}

// Rest parameter `T... name`; the declared element type is wrapped in a
// one-dimensional array type.
@Override
public BLangNode transform(RestParameterNode restParameter) {
    BLangSimpleVariable bLSimpleVar = createSimpleVar(restParameter.paramName(),
            restParameter.typeName(), restParameter.annotations());
    BLangArrayType bLArrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    bLArrayType.elemtype = bLSimpleVar.typeNode;
    bLArrayType.dimensions = 1;
    bLSimpleVar.typeNode = bLArrayType;
    bLArrayType.pos = getPosition(restParameter.typeName());
    bLSimpleVar.flagSet.add(Flag.REST_PARAM);
    bLSimpleVar.pos = getPosition(restParameter);
    return bLSimpleVar;
}

// Optional type `T?` becomes the union `T | ()`.
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.pos = getPosition(optTypeDescriptor.questionMarkToken());
    nilTypeNode.typeKind = TypeKind.NIL;
    BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionTypeNode.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    unionTypeNode.memberTypeNodes.add(nilTypeNode);
    unionTypeNode.nullable = true;
    unionTypeNode.pos = getPosition(optTypeDescriptor);
    return unionTypeNode;
}

// Function type descriptor `function (params) returns T`; a missing return type
// defaults to nil. (Method is incomplete in this view — it continues past it.)
@Override
public BLangNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {
    BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
    functionTypeNode.pos = getPosition(functionTypeDescriptorNode);
    functionTypeNode.returnsKeywordExists = true;
    if (functionTypeDescriptorNode.functionSignature().isPresent()) {
        FunctionSignatureNode funcSignature = functionTypeDescriptorNode.functionSignature().get();
        for (ParameterNode child : funcSignature.parameters()) {
            SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
            if (child.kind() == SyntaxKind.REST_PARAM) {
                functionTypeNode.restParam = (BLangSimpleVariable) param;
            } else {
                functionTypeNode.params.add((BLangVariable) param);
            }
        }
        Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
        if (retNode.isPresent()) {
            ReturnTypeDescriptorNode returnType = retNode.get();
            functionTypeNode.returnTypeNode = createTypeNode(returnType.type());
        } else {
            // No explicit return type: default to nil.
            BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            bLValueType.pos = getPosition(funcSignature);
            bLValueType.typeKind = TypeKind.NIL;
            functionTypeNode.returnTypeNode = bLValueType;
        }
    } else {
functionTypeNode.flagSet.add(Flag.ANY_FUNCTION); } functionTypeNode.flagSet.add(Flag.PUBLIC); for (Token token : functionTypeDescriptorNode.qualifierList()) { if (token.kind() == SyntaxKind.ISOLATED_KEYWORD) { functionTypeNode.flagSet.add(Flag.ISOLATED); } else if (token.kind() == SyntaxKind.TRANSACTIONAL_KEYWORD) { functionTypeNode.flagSet.add(Flag.TRANSACTIONAL); } } return functionTypeNode; } @Override public BLangNode transform(MapTypeDescriptorNode mapTypeDescNode) { BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = TypeKind.MAP; refType.pos = getPosition(mapTypeDescNode); BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode(); constrainedType.type = refType; constrainedType.constraint = createTypeNode(mapTypeDescNode.mapTypeParamsNode().typeNode()); constrainedType.pos = refType.pos; return constrainedType; } @Override public BLangNode transform(KeySpecifierNode keySpecifierNode) { BLangTableKeySpecifier tableKeySpecifierNode = (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode(); tableKeySpecifierNode.pos = getPosition(keySpecifierNode); for (Token field : keySpecifierNode.fieldNames()) { tableKeySpecifierNode.addFieldNameIdentifier(createIdentifier(field)); } return tableKeySpecifierNode; } @Override public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) { BLangTableKeyTypeConstraint tableKeyTypeConstraint = new BLangTableKeyTypeConstraint(); tableKeyTypeConstraint.pos = getPosition(keyTypeConstraintNode); tableKeyTypeConstraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode()); return tableKeyTypeConstraint; } @Override public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) { BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = 
TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text()); refType.pos = getPosition(tableTypeDescriptorNode); BLangTableTypeNode tableTypeNode = (BLangTableTypeNode) TreeBuilder.createTableTypeNode(); tableTypeNode.pos = getPosition(tableTypeDescriptorNode); tableTypeNode.type = refType; tableTypeNode.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode()); if (tableTypeDescriptorNode.keyConstraintNode().isPresent()) { Node constraintNode = tableTypeDescriptorNode.keyConstraintNode().get(); if (constraintNode.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) { tableTypeNode.tableKeyTypeConstraint = (BLangTableKeyTypeConstraint) constraintNode.apply(this); } else if (constraintNode.kind() == SyntaxKind.KEY_SPECIFIER) { tableTypeNode.tableKeySpecifier = (BLangTableKeySpecifier) constraintNode.apply(this); } } tableTypeNode.isTypeInlineDefined = checkIfAnonymous(tableTypeDescriptorNode); return tableTypeNode; } @Override public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) { BLangUserDefinedType bLUserDefinedType = new BLangUserDefinedType(); bLUserDefinedType.pos = getPosition(simpleNameRefNode); bLUserDefinedType.typeName = createIdentifier(simpleNameRefNode.name()); bLUserDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); return bLUserDefinedType; } @Override public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode(); varRef.pos = getPosition(qualifiedNameReferenceNode); varRef.variableName = createIdentifier(qualifiedNameReferenceNode.identifier()); varRef.pkgAlias = createIdentifier(qualifiedNameReferenceNode.modulePrefix()); return varRef; } @Override public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) { BLangXMLProcInsLiteral xmlProcInsLiteral = (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode(); if 
(xmlProcessingInstruction.data().isEmpty()) { BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = getPosition(xmlProcessingInstruction); xmlProcInsLiteral.dataFragments.add(emptyLiteral); } else { for (Node dataNode : xmlProcessingInstruction.data()) { xmlProcInsLiteral.dataFragments.add(createExpression(dataNode)); } } XMLNameNode target = xmlProcessingInstruction.target(); if (target.kind() == SyntaxKind.XML_SIMPLE_NAME) { xmlProcInsLiteral.target = createSimpleLiteral(((XMLSimpleNameNode) target).name()); } else { xmlProcInsLiteral.target = createSimpleLiteral(((XMLQualifiedNameNode) target).prefix()); } xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction); return xmlProcInsLiteral; } @Override public BLangNode transform(XMLComment xmlComment) { BLangXMLCommentLiteral xmlCommentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode(); Location pos = getPosition(xmlComment); if (xmlComment.content().isEmpty()) { BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = pos; xmlCommentLiteral.textFragments.add(emptyLiteral); } else { for (Node commentNode : xmlComment.content()) { xmlCommentLiteral.textFragments.add(createExpression(commentNode)); } } xmlCommentLiteral.pos = pos; return xmlCommentLiteral; } @Override public BLangNode transform(XMLElementNode xmlElementNode) { BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode(); xmlElement.startTagName = createExpression(xmlElementNode.startTag()); xmlElement.endTagName = createExpression(xmlElementNode.endTag()); for (Node node : xmlElementNode.content()) { if (node.kind() == SyntaxKind.XML_TEXT) { xmlElement.children.add(createSimpleLiteral(((XMLTextNode) node).content())); continue; } xmlElement.children.add(createExpression(node)); } for (XMLAttributeNode attribute : xmlElementNode.startTag().attributes()) { xmlElement.attributes.add((BLangXMLAttribute) attribute.apply(this)); } xmlElement.pos = 
getPosition(xmlElementNode); xmlElement.isRoot = true; return xmlElement; } @Override public BLangNode transform(XMLAttributeNode xmlAttributeNode) { BLangXMLAttribute xmlAttribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode(); xmlAttribute.value = (BLangXMLQuotedString) xmlAttributeNode.value().apply(this); xmlAttribute.name = createExpression(xmlAttributeNode.attributeName()); xmlAttribute.pos = getPosition(xmlAttributeNode); return xmlAttribute; } @Override public BLangNode transform(ByteArrayLiteralNode byteArrayLiteralNode) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.pos = getPosition(byteArrayLiteralNode); literal.setBType(symTable.getTypeFromTag(TypeTags.BYTE_ARRAY)); literal.getBType().tag = TypeTags.BYTE_ARRAY; literal.value = getValueFromByteArrayNode(byteArrayLiteralNode); literal.originalValue = String.valueOf(literal.value); return literal; } @Override public BLangNode transform(XMLAttributeValue xmlAttributeValue) { BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode(); quotedString.pos = getPosition(xmlAttributeValue); if (xmlAttributeValue.startQuote().kind() == SyntaxKind.SINGLE_QUOTE_TOKEN) { quotedString.quoteType = QuoteType.SINGLE_QUOTE; } else { quotedString.quoteType = QuoteType.DOUBLE_QUOTE; } if (xmlAttributeValue.value().isEmpty()) { BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = getPosition(xmlAttributeValue); quotedString.textFragments.add(emptyLiteral); } else if (xmlAttributeValue.value().size() == 1 && xmlAttributeValue.value().get(0).kind() == SyntaxKind.INTERPOLATION) { quotedString.textFragments.add(createExpression(xmlAttributeValue.value().get(0))); BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = getPosition(xmlAttributeValue); quotedString.textFragments.add(emptyLiteral); } else { for (Node value : xmlAttributeValue.value()) { if (value.kind() == SyntaxKind.XML_TEXT_CONTENT) { Token 
token = (Token) value; String normalizedValue = XmlFactory.XMLTextUnescape.unescape(token.text()); quotedString.textFragments.add(createStringLiteral(normalizedValue, getPosition(value))); } else { quotedString.textFragments.add(createExpression(value)); } } } return quotedString; } @Override public BLangNode transform(XMLStartTagNode startTagNode) { return startTagNode.name().apply(this); } @Override public BLangNode transform(XMLEndTagNode endTagNode) { return endTagNode.name().apply(this); } @Override public BLangNode transform(XMLTextNode xmlTextNode) { return createExpression(xmlTextNode.content()); } private BLangNode createXMLEmptyLiteral(TemplateExpressionNode expressionNode) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.pos = getPosition(expressionNode); xmlTextLiteral.textFragments.add(createEmptyStringLiteral(xmlTextLiteral.pos)); return xmlTextLiteral; } private BLangNode createXMLTextLiteral(List<Node> expressionNode) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.pos = getPosition(expressionNode.get(0)); for (Node node : expressionNode) { xmlTextLiteral.textFragments.add(createExpression(node)); } xmlTextLiteral.textFragments.add(createEmptyStringLiteral(xmlTextLiteral.pos)); return xmlTextLiteral; } private BLangNode createXMLTextLiteral(Node expressionNode) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.pos = getPosition(expressionNode); xmlTextLiteral.textFragments.add(createExpression(expressionNode)); return xmlTextLiteral; } @Override public BLangNode transform(XMLNamespaceDeclarationNode xmlnsDeclNode) { BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode(); BLangIdentifier prefixIdentifier = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null)); BLangExpression namespaceUri = createExpression(xmlnsDeclNode.namespaceuri()); 
xmlns.namespaceURI = namespaceUri; xmlns.prefix = prefixIdentifier; xmlns.pos = getPosition(xmlnsDeclNode); BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode(); xmlnsStmt.xmlnsDecl = xmlns; xmlnsStmt.pos = getPosition(xmlnsDeclNode); return xmlnsStmt; } @Override public BLangNode transform(ModuleXMLNamespaceDeclarationNode xmlnsDeclNode) { BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode(); BLangIdentifier prefixIdentifier = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null)); BLangExpression namespaceUri = createExpression(xmlnsDeclNode.namespaceuri()); xmlns.namespaceURI = namespaceUri; xmlns.prefix = prefixIdentifier; xmlns.pos = getPosition(xmlnsDeclNode); return xmlns; } @Override public BLangNode transform(XMLQualifiedNameNode xmlQualifiedNameNode) { BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode(); xmlName.localname = createIdentifier(getPosition(xmlQualifiedNameNode.name()), xmlQualifiedNameNode.name().name()); xmlName.prefix = createIdentifier(getPosition(xmlQualifiedNameNode.prefix()), xmlQualifiedNameNode.prefix().name()); xmlName.pos = getPosition(xmlQualifiedNameNode); return xmlName; } @Override public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) { BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode(); xmlName.localname = createIdentifier(xmlSimpleNameNode.name()); xmlName.prefix = createIdentifier(null, ""); xmlName.pos = getPosition(xmlSimpleNameNode); return xmlName; } @Override public BLangNode transform(XMLEmptyElementNode xMLEmptyElementNode) { BLangXMLElementLiteral xmlEmptyElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode(); xmlEmptyElement.startTagName = createExpression(xMLEmptyElementNode.name()); for (XMLAttributeNode attribute : xMLEmptyElementNode.attributes()) { xmlEmptyElement.attributes.add((BLangXMLAttribute) attribute.apply(this)); } xmlEmptyElement.pos = 
getPosition(xMLEmptyElementNode); return xmlEmptyElement; } @Override public BLangNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) { BLangInvocation.BLangActionInvocation bLangActionInvocation = (BLangInvocation.BLangActionInvocation) TreeBuilder.createActionInvocation(); bLangActionInvocation.expr = createExpression(remoteMethodCallActionNode.expression()); bLangActionInvocation.argExprs = applyAll(remoteMethodCallActionNode.arguments()); BLangNameReference nameReference = createBLangNameReference(remoteMethodCallActionNode.methodName().name()); bLangActionInvocation.name = (BLangIdentifier) nameReference.name; bLangActionInvocation.pkgAlias = (BLangIdentifier) nameReference.pkgAlias; bLangActionInvocation.pos = getPosition(remoteMethodCallActionNode); return bLangActionInvocation; } @Override public BLangNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) { BLangType constraint, error = null; Location pos = getPosition(streamTypeDescriptorNode); Optional<Node> paramsNode = streamTypeDescriptorNode.streamTypeParamsNode(); boolean hasConstraint = paramsNode.isPresent(); if (!hasConstraint) { constraint = addValueType(pos, TypeKind.ANY); } else { StreamTypeParamsNode params = (StreamTypeParamsNode) paramsNode.get(); if (params.rightTypeDescNode().isPresent()) { error = createTypeNode(params.rightTypeDescNode().get()); } constraint = createTypeNode(params.leftTypeDescNode()); } BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = TypeKind.STREAM; refType.pos = pos; BLangStreamType streamType = (BLangStreamType) TreeBuilder.createStreamTypeNode(); streamType.type = refType; streamType.constraint = constraint; streamType.error = error; streamType.pos = pos; return streamType; } @Override public BLangNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) { int dimensions = 1; List<BLangExpression> sizes = new ArrayList<>(); Location position = 
getPosition(arrayTypeDescriptorNode); while (true) { if (arrayTypeDescriptorNode.arrayLength().isEmpty()) { sizes.add(new BLangLiteral(OPEN_ARRAY_INDICATOR, symTable.intType)); } else { Node keyExpr = arrayTypeDescriptorNode.arrayLength().get(); if (keyExpr.kind() == SyntaxKind.NUMERIC_LITERAL) { Token literalToken = ((BasicLiteralNode) keyExpr).literalToken(); if (literalToken.kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) { sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text()), symTable.intType)); } else { sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text(), 16), symTable.intType)); } } else if (keyExpr.kind() == SyntaxKind.ASTERISK_LITERAL) { sizes.add(new BLangLiteral(INFERRED_ARRAY_INDICATOR, symTable.intType)); } else { sizes.add(createExpression(keyExpr)); } } if (arrayTypeDescriptorNode.memberTypeDesc().kind() != SyntaxKind.ARRAY_TYPE_DESC) { break; } arrayTypeDescriptorNode = (ArrayTypeDescriptorNode) arrayTypeDescriptorNode.memberTypeDesc(); dimensions++; } BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode(); arrayTypeNode.pos = position; arrayTypeNode.elemtype = createTypeNode(arrayTypeDescriptorNode.memberTypeDesc()); arrayTypeNode.dimensions = dimensions; arrayTypeNode.sizes = sizes.toArray(new BLangExpression[0]); return arrayTypeNode; } public BLangNode transform(EnumDeclarationNode enumDeclarationNode) { Boolean publicQualifier = false; if (enumDeclarationNode.qualifier().isPresent() && enumDeclarationNode.qualifier().get().kind() == SyntaxKind.PUBLIC_KEYWORD) { publicQualifier = true; } for (Node member : enumDeclarationNode.enumMemberList()) { EnumMemberNode enumMember = (EnumMemberNode) member; if (enumMember.identifier().isMissing()) { continue; } addToTop(transformEnumMember(enumMember, publicQualifier)); } BLangTypeDefinition bLangTypeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); if (publicQualifier) { bLangTypeDefinition.flagSet.add(Flag.PUBLIC); } 
bLangTypeDefinition.flagSet.add(Flag.ENUM); bLangTypeDefinition.setName((BLangIdentifier) transform(enumDeclarationNode.identifier())); bLangTypeDefinition.pos = getPosition(enumDeclarationNode); BLangUnionTypeNode bLangUnionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); bLangUnionTypeNode.pos = bLangTypeDefinition.pos; for (Node member : enumDeclarationNode.enumMemberList()) { Node enumMemberIdentifier = ((EnumMemberNode) member).identifier(); if (enumMemberIdentifier.isMissing()) { continue; } bLangUnionTypeNode.memberTypeNodes.add(createTypeNode(enumMemberIdentifier)); } Collections.reverse(bLangUnionTypeNode.memberTypeNodes); bLangTypeDefinition.setTypeNode(bLangUnionTypeNode); bLangTypeDefinition.annAttachments = applyAll(getAnnotations(enumDeclarationNode.metadata())); bLangTypeDefinition.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(enumDeclarationNode.metadata())); return bLangTypeDefinition; } public BLangConstant transformEnumMember(EnumMemberNode member, Boolean publicQualifier) { BLangConstant bLangConstant = (BLangConstant) TreeBuilder.createConstantNode(); bLangConstant.pos = getPosition(member); bLangConstant.flagSet.add(Flag.CONSTANT); bLangConstant.flagSet.add(Flag.ENUM_MEMBER); if (publicQualifier) { bLangConstant.flagSet.add(Flag.PUBLIC); } bLangConstant.annAttachments = applyAll(getAnnotations(member.metadata())); bLangConstant.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(member.metadata())); bLangConstant.setName((BLangIdentifier) transform(member.identifier())); BLangExpression deepLiteral; if (member.constExprNode().isPresent()) { BLangExpression expression = createExpression(member.constExprNode().orElse(null)); bLangConstant.setInitialExpression(expression); deepLiteral = createExpression(member.constExprNode().orElse(null)); } else { BLangLiteral literal = createSimpleLiteral(member.identifier()); 
bLangConstant.setInitialExpression(literal); deepLiteral = createSimpleLiteral(member.identifier()); } BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); typeNode.pos = symTable.builtinPos; typeNode.typeKind = TypeKind.STRING; bLangConstant.setTypeNode(typeNode); if (deepLiteral instanceof BLangLiteral) { BLangLiteral literal = (BLangLiteral) deepLiteral; if (!literal.originalValue.equals("")) { BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); literal.originalValue = null; typeNodeAssociated.addValue(deepLiteral); bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated); } else { bLangConstant.associatedTypeDefinition = null; } } else { BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); typeNodeAssociated.addValue(deepLiteral); bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated); } return bLangConstant; } @Override public BLangNode transform(QueryExpressionNode queryExprNode) { BLangQueryExpr queryExpr = (BLangQueryExpr) TreeBuilder.createQueryExpressionNode(); queryExpr.pos = getPosition(queryExprNode); BLangFromClause fromClause = (BLangFromClause) queryExprNode.queryPipeline().fromClause().apply(this); queryExpr.queryClauseList.add(fromClause); for (Node clauseNode : queryExprNode.queryPipeline().intermediateClauses()) { queryExpr.queryClauseList.add(clauseNode.apply(this)); } BLangSelectClause selectClause = (BLangSelectClause) queryExprNode.selectClause().apply(this); queryExpr.queryClauseList.add(selectClause); Optional<OnConflictClauseNode> onConflict = queryExprNode.onConflictClause(); onConflict.ifPresent(onConflictClauseNode -> queryExpr.queryClauseList.add(onConflictClauseNode.apply(this))); boolean isTable = false; boolean isStream = false; Optional<QueryConstructTypeNode> optionalQueryConstructTypeNode = queryExprNode.queryConstructType(); if 
(optionalQueryConstructTypeNode.isPresent()) { QueryConstructTypeNode queryConstructTypeNode = optionalQueryConstructTypeNode.get(); isTable = queryConstructTypeNode.keyword().kind() == SyntaxKind.TABLE_KEYWORD; isStream = queryConstructTypeNode.keyword().kind() == SyntaxKind.STREAM_KEYWORD; if (queryConstructTypeNode.keySpecifier().isPresent()) { for (IdentifierToken fieldNameNode : queryConstructTypeNode.keySpecifier().get().fieldNames()) { queryExpr.fieldNameIdentifierList.add(createIdentifier(getPosition(fieldNameNode), fieldNameNode)); } } } queryExpr.isStream = isStream; queryExpr.isTable = isTable; return queryExpr; } public BLangNode transform(OnFailClauseNode onFailClauseNode) { Location pos = getPosition(onFailClauseNode); BLangSimpleVariableDef variableDefinitionNode = (BLangSimpleVariableDef) TreeBuilder. createSimpleVariableDefinitionNode(); BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode(); boolean isDeclaredWithVar = onFailClauseNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC; var.isDeclaredWithVar = isDeclaredWithVar; if (!isDeclaredWithVar) { var.setTypeNode(createTypeNode(onFailClauseNode.typeDescriptor())); } var.pos = getPosition(onFailClauseNode); var.setName(this.createIdentifier(onFailClauseNode.failErrorName())); var.name.pos = getPosition(onFailClauseNode.failErrorName()); variableDefinitionNode.setVariable(var); variableDefinitionNode.pos = var.name.pos; BLangOnFailClause onFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode(); onFailClause.pos = pos; onFailClause.isDeclaredWithVar = isDeclaredWithVar; markVariableWithFlag(variableDefinitionNode.getVariable(), Flag.FINAL); onFailClause.variableDefinitionNode = variableDefinitionNode; BLangBlockStmt blockNode = (BLangBlockStmt) transform(onFailClauseNode.blockStatement()); blockNode.pos = getPosition(onFailClauseNode); onFailClause.body = blockNode; return onFailClause; } @Override public BLangNode transform(LetClauseNode 
letClauseNode) { BLangLetClause bLLetClause = (BLangLetClause) TreeBuilder.createLetClauseNode(); bLLetClause.pos = getPosition(letClauseNode); List<BLangLetVariable> letVars = new ArrayList<>(); for (LetVariableDeclarationNode letVarDeclr : letClauseNode.letVarDeclarations()) { BLangLetVariable letVar = createLetVariable(letVarDeclr); letVar.definitionNode.getVariable().addFlag(Flag.FINAL); letVars.add(letVar); } if (!letVars.isEmpty()) { bLLetClause.letVarDeclarations = letVars; } return bLLetClause; } @Override public BLangNode transform(FromClauseNode fromClauseNode) { BLangFromClause fromClause = (BLangFromClause) TreeBuilder.createFromClauseNode(); fromClause.pos = getPosition(fromClauseNode); fromClause.collection = createExpression(fromClauseNode.expression()); TypedBindingPatternNode bindingPatternNode = fromClauseNode.typedBindingPattern(); fromClause.variableDefinitionNode = createBLangVarDef(getPosition(bindingPatternNode), bindingPatternNode, Optional.empty(), Optional.empty()); boolean isDeclaredWithVar = bindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC; fromClause.isDeclaredWithVar = isDeclaredWithVar; return fromClause; } @Override public BLangNode transform(WhereClauseNode whereClauseNode) { BLangWhereClause whereClause = (BLangWhereClause) TreeBuilder.createWhereClauseNode(); whereClause.pos = getPosition(whereClauseNode); whereClause.expression = createExpression(whereClauseNode.expression()); return whereClause; } @Override public BLangNode transform(SelectClauseNode selectClauseNode) { BLangSelectClause selectClause = (BLangSelectClause) TreeBuilder.createSelectClauseNode(); selectClause.pos = getPosition(selectClauseNode); selectClause.expression = createExpression(selectClauseNode.expression()); return selectClause; } @Override public BLangNode transform(OnConflictClauseNode onConflictClauseNode) { BLangOnConflictClause onConflictClause = (BLangOnConflictClause) TreeBuilder.createOnConflictClauseNode(); 
onConflictClause.pos = getPosition(onConflictClauseNode); onConflictClause.expression = createExpression(onConflictClauseNode.expression()); return onConflictClause; } @Override public BLangNode transform(LimitClauseNode limitClauseNode) { BLangLimitClause selectClause = (BLangLimitClause) TreeBuilder.createLimitClauseNode(); selectClause.pos = getPosition(limitClauseNode); selectClause.expression = createExpression(limitClauseNode.expression()); return selectClause; } @Override public BLangNode transform(OnClauseNode onClauseNode) { BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode(); onClause.pos = getPosition(onClauseNode); onClause.lhsExpr = createExpression(onClauseNode.lhsExpression()); onClause.rhsExpr = createExpression(onClauseNode.rhsExpression()); return onClause; } @Override public BLangNode transform(JoinClauseNode joinClauseNode) { BLangJoinClause joinClause = (BLangJoinClause) TreeBuilder.createJoinClauseNode(); joinClause.pos = getPosition(joinClauseNode); TypedBindingPatternNode typedBindingPattern = joinClauseNode.typedBindingPattern(); joinClause.variableDefinitionNode = createBLangVarDef(getPosition(joinClauseNode), typedBindingPattern, Optional.empty(), Optional.empty()); joinClause.collection = createExpression(joinClauseNode.expression()); joinClause.isDeclaredWithVar = typedBindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC; joinClause.isOuterJoin = joinClauseNode.outerKeyword().isPresent(); OnClauseNode onClauseNode = joinClauseNode.joinOnCondition(); BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode(); onClause.pos = getPosition(onClauseNode); if (!onClauseNode.equalsKeyword().isMissing()) { onClause.equalsKeywordPos = getPosition(onClauseNode.equalsKeyword()); } onClause.lhsExpr = createExpression(onClauseNode.lhsExpression()); onClause.rhsExpr = createExpression(onClauseNode.rhsExpression()); joinClause.onClause = onClause; return joinClause; } @Override public BLangNode 
/* Transforms an order-by clause into a BLangOrderByClause, converting each order key in turn. */ transform(OrderByClauseNode orderByClauseNode) { BLangOrderByClause orderByClause = (BLangOrderByClause) TreeBuilder.createOrderByClauseNode(); orderByClause.pos = getPosition(orderByClauseNode); for (OrderKeyNode orderKeyNode : orderByClauseNode.orderKey()) { orderByClause.addOrderKey(createOrderKey(orderKeyNode)); } return orderByClause; } /* Builds a BLangOrderKey for one order-key expression; direction is ascending unless the explicit "descending" keyword is present. */ public BLangOrderKey createOrderKey(OrderKeyNode orderKeyNode) { BLangOrderKey orderKey = (BLangOrderKey) TreeBuilder.createOrderKeyNode(); orderKey.pos = getPosition(orderKeyNode); orderKey.expression = createExpression(orderKeyNode.expression()); if (orderKeyNode.orderDirection().isPresent() && orderKeyNode.orderDirection().get().text().equals("descending")) { orderKey.isAscending = false; } else { orderKey.isAscending = true; } return orderKey; } /* Transforms an A & B intersection type descriptor. When either side is already an intersection node its constituent list is reused (LHS is prepended into an RHS intersection to preserve left-to-right order); otherwise a fresh intersection node is created. */ @Override public BLangNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) { BLangType lhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.leftTypeDesc()); BLangType rhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.rightTypeDesc()); BLangIntersectionTypeNode intersectionType; if (rhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) { intersectionType = (BLangIntersectionTypeNode) rhsType; intersectionType.constituentTypeNodes.add(0, lhsType); } else if (lhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) { intersectionType = (BLangIntersectionTypeNode) lhsType; intersectionType.constituentTypeNodes.add(rhsType); } else { intersectionType = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode(); intersectionType.constituentTypeNodes.add(lhsType); intersectionType.constituentTypeNodes.add(rhsType); } intersectionType.pos = getPosition(intersectionTypeDescriptorNode); return intersectionType; } /* Transforms an inferred-typedesc default into its BLang expression node. */ @Override public BLangNode transform(InferredTypedescDefaultNode inferDefaultValueNode) { BLangInferredTypedescDefaultNode inferTypedescExpr = (BLangInferredTypedescDefaultNode) 
TreeBuilder.createInferTypedescExpressionNode(); inferTypedescExpr.pos = getPosition(inferDefaultValueNode); return inferTypedescExpr; } /* Fallback for syntax nodes that have no dedicated transform overload. */ @Override protected BLangNode transformSyntaxNode(Node node) { throw new RuntimeException("Node not supported: " + node.getClass().getSimpleName()); } /* Desugars a service declaration: the member list becomes an anonymous class definition (flagged SERVICE), a synthetic variable is initialized with a new instance of that class, and a BLangService ties together the variable, attach-point expressions, and the absolute resource path. A single string-literal path is kept as the service-name literal instead. */ @Override public BLangNode transform(ServiceDeclarationNode serviceDeclarationNode) { Location pos = getPositionWithoutMetadata(serviceDeclarationNode); BLangClassDefinition anonClassDef = transformObjectCtorExpressionBody(serviceDeclarationNode.members()); anonClassDef.isServiceDecl = true; anonClassDef.pos = pos; anonClassDef.flagSet.add(SERVICE); setClassQualifiers(serviceDeclarationNode.qualifiers(), anonClassDef); List<IdentifierNode> absResourcePathPath = new ArrayList<>(); NodeList<Node> pathList = serviceDeclarationNode.absoluteResourcePath(); BLangLiteral serviceNameLiteral = null; if (pathList.size() == 1 && pathList.get(0).kind() == SyntaxKind.STRING_LITERAL) { serviceNameLiteral = (BLangLiteral) createExpression(pathList.get(0)); } else { for (var token : pathList) { String text = ((Token) token).text(); /* a lone "/" is the root path and is kept; separator "/" tokens in longer paths are dropped */ if (pathList.size() == 1 && text.equals("/")) { absResourcePathPath.add(createIdentifier((Token) token)); } else if (!text.equals("/")) { absResourcePathPath.add(createIdentifier((Token) token)); } } } String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(pos, genName); anonClassDef.setName(anonTypeGenName); anonClassDef.flagSet.add(Flag.PUBLIC); Optional<TypeDescriptorNode> typeReference = serviceDeclarationNode.typeDescriptor(); typeReference.ifPresent(typeReferenceNode -> { BLangType typeNode = createTypeNode(typeReferenceNode); anonClassDef.typeRefs.add(typeNode); }); anonClassDef.annAttachments = applyAll(getAnnotations(serviceDeclarationNode.metadata())); anonClassDef.markdownDocumentationAttachment = 
createMarkdownDocumentationAttachment(getDocumentationString(serviceDeclarationNode.metadata())); addToTop(anonClassDef); BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode(); BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClassDef.name); BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode(); initNode.pos = pos; initNode.userDefinedType = userDefinedType; BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier pkgAlias = createIdentifier(pos, ""); BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName)); invocationNode.name = (BLangIdentifier) nameReference.name; invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias; initNode.argsExpr.addAll(invocationNode.argExprs); initNode.initInvocation = invocationNode; BLangSimpleVariable serviceVariable = createServiceVariable(pos, anonClassDef, initNode); List<BLangExpression> exprs = new ArrayList<>(); for (var exp : serviceDeclarationNode.expressions()) { exprs.add(createExpression(exp)); } BLangService service = (BLangService) TreeBuilder.createServiceNode(); service.serviceVariable = serviceVariable; service.attachedExprs = exprs; service.serviceClass = anonClassDef; service.absoluteResourcePath = absResourcePathPath; service.serviceNameLiteral = serviceNameLiteral; service.annAttachments = anonClassDef.annAttachments; service.pos = pos; service.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID)); return service; } /* Creates the synthetic (internal) variable that holds the service-class instance, typed by the anonymous class and initialized with the given type-init node. */ private BLangSimpleVariable createServiceVariable(Location pos, BLangClassDefinition annonClassDef, BLangTypeInit initNode) { BLangUserDefinedType typeName = createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), annonClassDef.name); BLangSimpleVariable serviceInstance = (BLangSimpleVariable) 
TreeBuilder.createSimpleVariableNode(); serviceInstance.typeNode = typeName; String serviceVarName = anonymousModelHelper.getNextAnonymousServiceVarKey(packageID); serviceInstance.name = createIdentifier(pos, serviceVarName); serviceInstance.expr = initNode; serviceInstance.internal = true; return serviceInstance; } /* Transforms a class definition: copies qualifiers/annotations/docs, then sorts members into the init function (first "init" wins; duplicates fall through to the normal function list so later phases can report them), attached functions, fields, and type references. */ @Override public BLangNode transform(ClassDefinitionNode classDefinitionNode) { BLangClassDefinition blangClass = (BLangClassDefinition) TreeBuilder.createClassDefNode(); blangClass.pos = getPositionWithoutMetadata(classDefinitionNode); blangClass.annAttachments = applyAll(getAnnotations(classDefinitionNode.metadata())); BLangIdentifier identifierNode = createIdentifier(classDefinitionNode.className()); blangClass.setName(identifierNode); blangClass.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(classDefinitionNode.metadata())); classDefinitionNode.visibilityQualifier().ifPresent(visibilityQual -> { if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) { blangClass.flagSet.add(Flag.PUBLIC); } }); setClassQualifiers(classDefinitionNode.classTypeQualifiers(), blangClass); NodeList<Node> members = classDefinitionNode.members(); for (Node node : members) { BLangNode bLangNode = node.apply(this); if (bLangNode.getKind() == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) { BLangFunction bLangFunction = (BLangFunction) bLangNode; bLangFunction.attachedFunction = true; bLangFunction.flagSet.add(Flag.ATTACHED); if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) { if (blangClass.initFunction == null) { bLangFunction.objInitFunction = true; blangClass.initFunction = bLangFunction; } else { blangClass.addFunction(bLangFunction); } } else { blangClass.addFunction(bLangFunction); } } else if (bLangNode.getKind() == NodeKind.VARIABLE) { blangClass.addField((BLangSimpleVariable) bLangNode); } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) { 
blangClass.addTypeReference((BLangType) bLangNode); } } return blangClass; } /* Transforms a retry statement. A retried transaction becomes BLangRetryTransaction; otherwise a plain BLangRetry with its block body and optional on-fail clause. */ @Override public BLangNode transform(RetryStatementNode retryStatementNode) { BLangRetrySpec retrySpec = createRetrySpec(retryStatementNode); Location pos = getPosition(retryStatementNode); StatementNode retryBody = retryStatementNode.retryBody(); if (retryBody.kind() == SyntaxKind.TRANSACTION_STATEMENT) { BLangRetryTransaction retryTransaction = (BLangRetryTransaction) TreeBuilder.createRetryTransactionNode(); retryTransaction.pos = pos; retryTransaction.setRetrySpec(retrySpec); retryTransaction.setTransaction((BLangTransaction) retryBody.apply(this)); return retryTransaction; } BLangRetry retryNode = (BLangRetry) TreeBuilder.createRetryNode(); retryNode.pos = pos; retryNode.setRetrySpec(retrySpec); BLangBlockStmt retryBlock = (BLangBlockStmt) retryBody.apply(this); retryNode.setRetryBody(retryBlock); retryStatementNode.onFailClause().ifPresent(onFailClauseNode -> { retryNode.setOnFailClause( (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this))); }); return retryNode; } /* Builds the retry spec from the optional retry-manager type parameter and optional argument list; the spec position covers whichever parts are present, falling back to the whole statement. */ private BLangRetrySpec createRetrySpec(RetryStatementNode retryStatementNode) { BLangRetrySpec retrySpec = (BLangRetrySpec) TreeBuilder.createRetrySpecNode(); if (retryStatementNode.typeParameter().isPresent()) { TypeParameterNode typeParam = retryStatementNode.typeParameter().get(); retrySpec.retryManagerType = createTypeNode(typeParam.typeNode()); retrySpec.pos = getPosition(typeParam); } if (retryStatementNode.arguments().isPresent()) { ParenthesizedArgList arg = retryStatementNode.arguments().get(); if (retryStatementNode.typeParameter().isPresent()) { retrySpec.pos = getPosition(retryStatementNode.typeParameter().get(), arg); } else { retrySpec.pos = getPosition(arg); } for (Node argNode : arg.arguments()) { retrySpec.argExprs.add(createExpression(argNode)); } } if (retrySpec.pos == null) { retrySpec.pos = getPosition(retryStatementNode); } return retrySpec; } /* Transforms a "transactional" expression into its BLang node. */ @Override public BLangNode 
transform(TransactionalExpressionNode transactionalExpressionNode) { BLangTransactionalExpr transactionalExpr = TreeBuilder.createTransactionalExpressionNode(); transactionalExpr.pos = getPosition(transactionalExpressionNode); return transactionalExpr; } /* Transforms an XML filter expression (x.<name-pattern>) into an element-access node with one filter per name pattern. */ @Override public BLangNode transform(XMLFilterExpressionNode xmlFilterExpressionNode) { List<BLangXMLElementFilter> filters = new ArrayList<>(); XMLNamePatternChainingNode xmlNamePatternChainingNode = xmlFilterExpressionNode.xmlPatternChain(); for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) { filters.add(createXMLElementFilter(node)); } BLangExpression expr = createExpression(xmlFilterExpressionNode.expression()); BLangXMLElementAccess elementAccess = new BLangXMLElementAccess(getPosition(xmlFilterExpressionNode), null, expr, filters); return elementAccess; } /* Transforms an XML step expression (x/*, x/<p>, x/**\/<p>); starCount encodes the navigation kind fed into NavAccessType.fromInt. */ @Override public BLangNode transform(XMLStepExpressionNode xmlStepExpressionNode) { List<BLangXMLElementFilter> filters = new ArrayList<>(); int starCount = 0; if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.SLASH_ASTERISK_TOKEN) { starCount = 1; } else if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.XML_NAME_PATTERN_CHAIN) { XMLNamePatternChainingNode xmlNamePatternChainingNode = (XMLNamePatternChainingNode) xmlStepExpressionNode.xmlStepStart(); for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) { filters.add(createXMLElementFilter(node)); } switch (xmlNamePatternChainingNode.startToken().kind()) { case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: starCount = 2; break; case SLASH_ASTERISK_TOKEN: starCount = 1; break; } } BLangExpression expr = createExpression(xmlStepExpressionNode.expression()); BLangXMLNavigationAccess xmlNavigationAccess = new BLangXMLNavigationAccess(getPosition(xmlStepExpressionNode), null, expr, filters, XMLNavigationAccess.NavAccessType.fromInt(starCount), null); return xmlNavigationAccess; } /* Transforms a match statement: the matched expression is shared with every clause and pattern, each clause gets its optional guard and block, and the optional on-fail clause is attached last. */ @Override public BLangNode transform(MatchStatementNode matchStatementNode) { BLangMatchStatement 
matchStatement = (BLangMatchStatement) TreeBuilder.createMatchStatementNode(); BLangExpression matchStmtExpr = createExpression(matchStatementNode.condition()); matchStatement.setExpression(matchStmtExpr); for (MatchClauseNode matchClauseNode : matchStatementNode.matchClauses()) { BLangMatchClause bLangMatchClause = (BLangMatchClause) TreeBuilder.createMatchClause(); bLangMatchClause.pos = getPosition(matchClauseNode); bLangMatchClause.expr = matchStmtExpr; boolean matchGuardAvailable = false; if (matchClauseNode.matchGuard().isPresent()) { matchGuardAvailable = true; BLangMatchGuard bLangMatchGuard = (BLangMatchGuard) TreeBuilder.createMatchGuard(); bLangMatchGuard.expr = createExpression(matchClauseNode.matchGuard().get().expression()); bLangMatchGuard.pos = getPosition(matchClauseNode.matchGuard().get()); bLangMatchClause.setMatchGuard(bLangMatchGuard); } for (Node matchPattern : matchClauseNode.matchPatterns()) { BLangMatchPattern bLangMatchPattern = transformMatchPattern(matchPattern); /* transformMatchPattern may return null for unsupported nodes; skip those */ if (bLangMatchPattern != null) { bLangMatchPattern.matchExpr = matchStmtExpr; bLangMatchPattern.matchGuardIsAvailable = matchGuardAvailable; bLangMatchClause.addMatchPattern(bLangMatchPattern); } } bLangMatchClause.setBlockStatement((BLangBlockStmt) transform(matchClauseNode.blockStatement())); matchStatement.addMatchClause(bLangMatchClause); } matchStatementNode.onFailClause().ifPresent(onFailClauseNode -> { matchStatement.setOnFailClause( (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this))); }); matchStatement.pos = getPosition(matchStatementNode); return matchStatement; } /* Builds an XML sequence literal: adjacent text/interpolation children are merged into a single text literal; other children become individual items. */ private BLangXMLSequenceLiteral createXmlSequence(TemplateExpressionNode expressionNode) { BLangXMLSequenceLiteral xmlSequenceLiteral = (BLangXMLSequenceLiteral) TreeBuilder.createXMLSequenceLiteralNode(); xmlSequenceLiteral.pos = getPosition(expressionNode); Node lastNode = null; List<Node> adjacentTextNodes = new ArrayList<>(); int xmlContentSize = 
expressionNode.content().size(); for (int index = 0; index < xmlContentSize; index++) { Node childItem = expressionNode.content().get(index); if (childItem.kind() == SyntaxKind.XML_TEXT || childItem.kind() == SyntaxKind.INTERPOLATION) { adjacentTextNodes.add(childItem); lastNode = childItem; /* keep accumulating unless this is the final child */ if (index != xmlContentSize - 1) { continue; } } if (lastNode != null && (lastNode.kind() == SyntaxKind.XML_TEXT || lastNode.kind() == SyntaxKind.INTERPOLATION)) { if (adjacentTextNodes.size() > 1) { xmlSequenceLiteral.xmlItems.add((BLangExpression) createXMLTextLiteral(adjacentTextNodes)); } else { xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(lastNode)); } adjacentTextNodes.clear(); if (lastNode.kind() == childItem.kind()) { continue; } } xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(childItem)); lastNode = childItem; } return xmlSequenceLiteral; } /* Converts one XML child node into an expression; comments/PIs/elements go through createExpression, everything else becomes a text literal. */ public BLangExpression createXmlSingletonItem(Node xmlTypeNode) { switch (xmlTypeNode.kind()) { case XML_COMMENT: case XML_PI: case XML_ELEMENT: case XML_EMPTY_ELEMENT: return createExpression(xmlTypeNode); default: return (BLangExpression) createXMLTextLiteral(xmlTypeNode); } } /* Entry point for xml template literals: empty -> empty literal, one child -> singleton, otherwise a sequence. */ public BLangNode createXmlTemplateLiteral(TemplateExpressionNode expressionNode) { if (expressionNode.content().isEmpty()) { return createXMLEmptyLiteral(expressionNode); } if (expressionNode.content().size() == 1) { return createXmlSingletonItem(expressionNode.content().get(0)); } return createXmlSequence(expressionNode); } /* Dispatches a match-pattern syntax node to the specific transformer by kind; "_" (as a name ref or identifier) becomes a wildcard pattern, and anything not handled above is asserted to be a constant pattern. */ private BLangMatchPattern transformMatchPattern(Node matchPattern) { Location matchPatternPos = matchPattern.location(); SyntaxKind kind = matchPattern.kind(); if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE && ((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) { BLangWildCardMatchPattern bLangWildCardMatchPattern = (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern(); bLangWildCardMatchPattern.pos = matchPatternPos; return bLangWildCardMatchPattern; } if (kind == 
SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) { BLangWildCardMatchPattern bLangWildCardMatchPattern = (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern(); bLangWildCardMatchPattern.pos = matchPatternPos; return bLangWildCardMatchPattern; } if (kind == SyntaxKind.TYPED_BINDING_PATTERN) { TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern; BLangVarBindingPatternMatchPattern bLangVarBindingPattern = (BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern(); bLangVarBindingPattern.pos = matchPatternPos; bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern())); return bLangVarBindingPattern; } if (kind == SyntaxKind.ERROR_MATCH_PATTERN) { return transformErrorMatchPattern((ErrorMatchPatternNode) matchPattern, matchPatternPos); } if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) { return transformNamedArgMatchPattern((NamedArgMatchPatternNode) matchPattern, matchPatternPos); } if (kind == SyntaxKind.LIST_MATCH_PATTERN) { return transformListMatchPattern((ListMatchPatternNode) matchPattern, matchPatternPos); } if (kind == SyntaxKind.REST_MATCH_PATTERN) { return transformRestMatchPattern((RestMatchPatternNode) matchPattern, matchPatternPos); } if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) { return transformMappingMatchPattern((MappingMatchPatternNode) matchPattern, matchPatternPos); } if (kind == SyntaxKind.FIELD_MATCH_PATTERN) { return transformFieldMatchPattern((FieldMatchPatternNode) matchPattern, matchPatternPos); } assert (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL || kind == SyntaxKind.SIMPLE_NAME_REFERENCE || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.IDENTIFIER_TOKEN || kind == SyntaxKind.NULL_LITERAL || kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.BOOLEAN_LITERAL || kind == SyntaxKind.UNARY_EXPRESSION); BLangConstPattern bLangConstMatchPattern = 
(BLangConstPattern) TreeBuilder.createConstMatchPattern(); bLangConstMatchPattern.setExpression(createExpression(matchPattern)); bLangConstMatchPattern.pos = matchPatternPos; return bLangConstMatchPattern; } /* Transforms an error match pattern: optional type reference, then the positional arg list is split into message pattern, cause pattern, and trailing field patterns (named args / rest). */ private BLangErrorMatchPattern transformErrorMatchPattern(ErrorMatchPatternNode errorMatchPatternNode, Location pos) { BLangErrorMatchPattern bLangErrorMatchPattern = (BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern(); bLangErrorMatchPattern.pos = pos; NameReferenceNode nameReferenceNode; if (errorMatchPatternNode.typeReference().isPresent()) { nameReferenceNode = errorMatchPatternNode.typeReference().get(); bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode); } if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) { return bLangErrorMatchPattern; } Node node = errorMatchPatternNode.argListMatchPatternNode().get(0); if (isErrorFieldMatchPattern(node)) { createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern); return bLangErrorMatchPattern; } bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node); if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) { return bLangErrorMatchPattern; } node = errorMatchPatternNode.argListMatchPatternNode().get(1); if (isErrorFieldMatchPattern(node)) { createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern); return bLangErrorMatchPattern; } bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node); createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern); return bLangErrorMatchPattern; } /* Transforms a named-argument match pattern (name = pattern). */ private BLangNamedArgMatchPattern transformNamedArgMatchPattern(NamedArgMatchPatternNode namedArgMatchPatternNode, Location pos) { BLangNamedArgMatchPattern bLangNamedArgMatchPattern = (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern(); bLangNamedArgMatchPattern.argName = 
createIdentifier(namedArgMatchPatternNode.identifier()); bLangNamedArgMatchPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern()); bLangNamedArgMatchPattern.pos = pos; return bLangNamedArgMatchPattern; } /* Transforms a list match pattern; a trailing rest pattern is stored separately from member patterns. Null member transforms are skipped. */ private BLangListMatchPattern transformListMatchPattern(ListMatchPatternNode listMatchPatternNode, Location pos) { BLangListMatchPattern bLangListMatchPattern = (BLangListMatchPattern) TreeBuilder.createListMatchPattern(); bLangListMatchPattern.pos = pos; SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns(); int matchPatternListSize = matchPatterns.size(); if (matchPatternListSize == 0) { return bLangListMatchPattern; } for (int i = 0; i < matchPatternListSize - 1; i++) { BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i)); if (bLangMemberMatchPattern == null) { continue; } bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern); } BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1)); if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) { bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember); } else { bLangListMatchPattern.addMatchPattern(lastMember); } return bLangListMatchPattern; } /* Transforms a rest match pattern (...var x). */ private BLangRestMatchPattern transformRestMatchPattern(RestMatchPatternNode restMatchPatternNode, Location pos) { BLangRestMatchPattern bLangRestMatchPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern(); bLangRestMatchPattern.pos = pos; SimpleNameReferenceNode variableName = restMatchPatternNode.variableName(); bLangRestMatchPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name())); return bLangRestMatchPattern; } /* Transforms a mapping match pattern; a trailing rest pattern is stored separately from field patterns. */ private BLangMappingMatchPattern transformMappingMatchPattern(MappingMatchPatternNode mappingMatchPatternNode, Location pos) { BLangMappingMatchPattern bLangMappingMatchPattern = (BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern(); 
bLangMappingMatchPattern.pos = pos; SeparatedNodeList<Node> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns(); int fieldMatchPatternListSize = fieldMatchPatterns.size(); if (fieldMatchPatternListSize == 0) { return bLangMappingMatchPattern; } for (int i = 0; i < fieldMatchPatternListSize - 1; i++) { bLangMappingMatchPattern.fieldMatchPatterns.add((BLangFieldMatchPattern) transformMatchPattern(fieldMatchPatterns.get(i))); } BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1)); if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) { bLangMappingMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember); } else { bLangMappingMatchPattern.addFieldMatchPattern((BLangFieldMatchPattern) lastMember); } return bLangMappingMatchPattern; } /* Transforms a field match pattern (fieldName: pattern). */ private BLangFieldMatchPattern transformFieldMatchPattern(FieldMatchPatternNode fieldMatchPatternNode, Location pos) { BLangFieldMatchPattern bLangFieldMatchPattern = (BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern(); bLangFieldMatchPattern.pos = pos; bLangFieldMatchPattern.fieldName = createIdentifier(fieldMatchPatternNode.fieldNameNode()); bLangFieldMatchPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern()); return bLangFieldMatchPattern; } /* Dispatches a binding-pattern syntax node to the specific transformer by kind; the default branch asserts the only remaining kind is the wildcard pattern. */ private BLangBindingPattern transformBindingPattern(Node bindingPattern) { Location pos = getPosition(bindingPattern); SyntaxKind patternKind = bindingPattern.kind(); switch (patternKind) { case CAPTURE_BINDING_PATTERN: return transformCaptureBindingPattern((CaptureBindingPatternNode) bindingPattern, pos); case LIST_BINDING_PATTERN: return transformListBindingPattern((ListBindingPatternNode) bindingPattern, pos); case NAMED_ARG_BINDING_PATTERN: return transformNamedArgBindingPattern((NamedArgBindingPatternNode) bindingPattern, pos); case REST_BINDING_PATTERN: return transformRestBindingPattern((RestBindingPatternNode) bindingPattern, pos); case MAPPING_BINDING_PATTERN: return 
transformMappingBindingPattern((MappingBindingPatternNode) bindingPattern, pos); case FIELD_BINDING_PATTERN: return transformFieldBindingPattern(bindingPattern, pos); case ERROR_BINDING_PATTERN: return transformErrorBindingPattern((ErrorBindingPatternNode) bindingPattern, pos); case WILDCARD_BINDING_PATTERN: default: assert patternKind == SyntaxKind.WILDCARD_BINDING_PATTERN; return transformWildCardBindingPattern(pos); } } /* Creates a wildcard ("_") binding pattern at the given position. */ private BLangWildCardBindingPattern transformWildCardBindingPattern(Location pos) { BLangWildCardBindingPattern bLangWildCardBindingPattern = (BLangWildCardBindingPattern) TreeBuilder.createWildCardBindingPattern(); bLangWildCardBindingPattern.pos = pos; return bLangWildCardBindingPattern; } /* Transforms a capture binding pattern (a plain variable name). */ private BLangCaptureBindingPattern transformCaptureBindingPattern(CaptureBindingPatternNode captureBindingPattern, Location pos) { BLangCaptureBindingPattern bLangCaptureBindingPattern = (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern(); bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPattern.variableName())); bLangCaptureBindingPattern.pos = pos; return bLangCaptureBindingPattern; } /* Transforms a rest binding pattern (...x). */ private BLangRestBindingPattern transformRestBindingPattern(RestBindingPatternNode restBindingPatternNode, Location pos) { BLangRestBindingPattern bLangRestBindingPattern = (BLangRestBindingPattern) TreeBuilder.createRestBindingPattern(); bLangRestBindingPattern.pos = pos; SimpleNameReferenceNode variableName = restBindingPatternNode.variableName(); bLangRestBindingPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name())); return bLangRestBindingPattern; } /* Transforms a list binding pattern; a rest member is stored separately from ordinary members. */ private BLangListBindingPattern transformListBindingPattern(ListBindingPatternNode listBindingPatternNode, Location pos) { BLangListBindingPattern bLangListBindingPattern = (BLangListBindingPattern) TreeBuilder.createListBindingPattern(); bLangListBindingPattern.pos = pos; for (Node listMemberBindingPattern : listBindingPatternNode.bindingPatterns()) { 
if (listMemberBindingPattern.kind() != SyntaxKind.REST_BINDING_PATTERN) { bLangListBindingPattern.addBindingPattern(transformBindingPattern(listMemberBindingPattern)); continue; } bLangListBindingPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(listMemberBindingPattern); } return bLangListBindingPattern; } /* Transforms a mapping binding pattern; a rest field is stored separately from field binding patterns. */ private BLangMappingBindingPattern transformMappingBindingPattern(MappingBindingPatternNode mappingBindingPatternNode, Location pos) { BLangMappingBindingPattern bLangMappingBindingPattern = (BLangMappingBindingPattern) TreeBuilder.createMappingBindingPattern(); bLangMappingBindingPattern.pos = pos; for (Node fieldBindingPattern : mappingBindingPatternNode.fieldBindingPatterns()) { if (fieldBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) { bLangMappingBindingPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(fieldBindingPattern); continue; } bLangMappingBindingPattern.fieldBindingPatterns.add( (BLangFieldBindingPattern) transformBindingPattern(fieldBindingPattern)); } return bLangMappingBindingPattern; } /* Transforms a field binding pattern. The shorthand var-name form ({x}) desugars to fieldName plus an implicit capture pattern with the same identifier; the full form ({x: p}) recurses on the nested pattern. */ private BLangFieldBindingPattern transformFieldBindingPattern(Node bindingPattern, Location pos) { BLangFieldBindingPattern bLangFieldBindingPattern = (BLangFieldBindingPattern) TreeBuilder.createFieldBindingPattern(); bLangFieldBindingPattern.pos = pos; if (bindingPattern instanceof FieldBindingPatternVarnameNode) { FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode = (FieldBindingPatternVarnameNode) bindingPattern; BLangIdentifier fieldName = createIdentifier(fieldBindingPatternVarnameNode.variableName().name()); bLangFieldBindingPattern.fieldName = fieldName; BLangCaptureBindingPattern bLangCaptureBindingPatternInFieldBindingPattern = (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern(); bLangCaptureBindingPatternInFieldBindingPattern.setIdentifier(fieldName); bLangCaptureBindingPatternInFieldBindingPattern.pos = pos; bLangFieldBindingPattern.bindingPattern = 
bLangCaptureBindingPatternInFieldBindingPattern; return bLangFieldBindingPattern; } FieldBindingPatternFullNode fieldBindingPatternNode = (FieldBindingPatternFullNode) bindingPattern; bLangFieldBindingPattern.fieldName = createIdentifier(fieldBindingPatternNode.variableName().name()); bLangFieldBindingPattern.bindingPattern = transformBindingPattern(fieldBindingPatternNode.bindingPattern()); return bLangFieldBindingPattern; } /* Transforms a named-argument binding pattern (name = pattern). */ private BLangNamedArgBindingPattern transformNamedArgBindingPattern(NamedArgBindingPatternNode namedArgBindingPattern, Location pos) { BLangNamedArgBindingPattern bLangNamedArgBindingPattern = (BLangNamedArgBindingPattern) TreeBuilder.createNamedArgBindingPattern(); bLangNamedArgBindingPattern.pos = pos; bLangNamedArgBindingPattern.argName = createIdentifier(namedArgBindingPattern.argName()); bLangNamedArgBindingPattern.bindingPattern = transformBindingPattern(namedArgBindingPattern.bindingPattern()); return bLangNamedArgBindingPattern; } /* Transforms an error binding pattern: optional type reference, then the positional arg list is split into message pattern, cause pattern, and trailing field patterns — mirrors transformErrorMatchPattern. */ private BLangErrorBindingPattern transformErrorBindingPattern(ErrorBindingPatternNode errorBindingPatternNode, Location pos) { BLangErrorBindingPattern bLangErrorBindingPattern = (BLangErrorBindingPattern) TreeBuilder.createErrorBindingPattern(); bLangErrorBindingPattern.pos = pos; if (errorBindingPatternNode.typeReference().isPresent()) { Node nameReferenceNode = errorBindingPatternNode.typeReference().get(); bLangErrorBindingPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode); } if (errorBindingPatternNode.argListBindingPatterns().size() == 0) { return bLangErrorBindingPattern; } Node node = errorBindingPatternNode.argListBindingPatterns().get(0); if (isErrorFieldBindingPattern(node)) { createErrorFieldBindingPatterns(0, errorBindingPatternNode, bLangErrorBindingPattern); return bLangErrorBindingPattern; } bLangErrorBindingPattern.errorMessageBindingPattern = createErrorMessageBindingPattern(node); if (errorBindingPatternNode.argListBindingPatterns().size() == 1) { return 
bLangErrorBindingPattern; } node = errorBindingPatternNode.argListBindingPatterns().get(1); if (isErrorFieldBindingPattern(node)) { createErrorFieldBindingPatterns(1, errorBindingPatternNode, bLangErrorBindingPattern); return bLangErrorBindingPattern; } bLangErrorBindingPattern.errorCauseBindingPattern = createErrorCauseBindingPattern(node); createErrorFieldBindingPatterns(2, errorBindingPatternNode, bLangErrorBindingPattern); return bLangErrorBindingPattern; } /* True when the node is a named-arg or rest match pattern, i.e. part of an error pattern's field section. */ private boolean isErrorFieldMatchPattern(Node node) { return node.kind() == SyntaxKind.NAMED_ARG_MATCH_PATTERN || node.kind() == SyntaxKind.REST_MATCH_PATTERN; } /* Binding-pattern counterpart of isErrorFieldMatchPattern. */ private boolean isErrorFieldBindingPattern(Node node) { return node.kind() == SyntaxKind.NAMED_ARG_BINDING_PATTERN || node.kind() == SyntaxKind.REST_BINDING_PATTERN; } /* Wraps the first positional error arg as the error-message match pattern (a simple match pattern). */ private BLangErrorMessageMatchPattern createErrorMessageMatchPattern(Node node) { BLangMatchPattern matchPattern = transformMatchPattern(node); BLangErrorMessageMatchPattern bLangErrorMessageMatchPattern = (BLangErrorMessageMatchPattern) TreeBuilder.createErrorMessageMatchPattern(); bLangErrorMessageMatchPattern.pos = getPosition(node); bLangErrorMessageMatchPattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern); return bLangErrorMessageMatchPattern; } /* Binding-pattern counterpart of createErrorMessageMatchPattern. */ private BLangErrorMessageBindingPattern createErrorMessageBindingPattern(Node node) { BLangBindingPattern bindingPattern = transformBindingPattern(node); BLangErrorMessageBindingPattern bLangErrorMessageBindingPattern = (BLangErrorMessageBindingPattern) TreeBuilder.createErrorMessageBindingPattern(); bLangErrorMessageBindingPattern.pos = getPosition(node); bLangErrorMessageBindingPattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern); return bLangErrorMessageBindingPattern; } /* Wraps the second positional error arg as the cause pattern; a nested error pattern is kept as-is, otherwise it becomes a simple pattern. */ private BLangErrorCauseMatchPattern createErrorCauseMatchPattern(Node node) { BLangMatchPattern matchPattern = transformMatchPattern(node); BLangErrorCauseMatchPattern bLangErrorCauseMatchPattern = (BLangErrorCauseMatchPattern) 
TreeBuilder.createErrorCauseMatchPattern(); bLangErrorCauseMatchPattern.pos = getPosition(node); if (matchPattern.getKind() == NodeKind.ERROR_MATCH_PATTERN) { bLangErrorCauseMatchPattern.errorMatchPattern = (BLangErrorMatchPattern) matchPattern; return bLangErrorCauseMatchPattern; } bLangErrorCauseMatchPattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern); return bLangErrorCauseMatchPattern; } /* Binding-pattern counterpart of createErrorCauseMatchPattern. */ private BLangErrorCauseBindingPattern createErrorCauseBindingPattern(Node node) { BLangBindingPattern bindingPattern = transformBindingPattern(node); BLangErrorCauseBindingPattern bLangErrorCauseBindingPattern = (BLangErrorCauseBindingPattern) TreeBuilder.createErrorCauseBindingPattern(); bLangErrorCauseBindingPattern.pos = getPosition(node); if (bindingPattern.getKind() == NodeKind.ERROR_BINDING_PATTERN) { bLangErrorCauseBindingPattern.errorBindingPattern = (BLangErrorBindingPattern) bindingPattern; return bLangErrorCauseBindingPattern; } bLangErrorCauseBindingPattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern); return bLangErrorCauseBindingPattern; } /* Adds one transformed field pattern (named-arg or rest) into the accumulating error-field container and returns it. */ private BLangErrorFieldMatchPatterns createErrorFieldMatchPattern(Node errorFieldMatchPatternNode, BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns) { BLangMatchPattern matchPattern = transformMatchPattern(errorFieldMatchPatternNode); bLangErrorFieldMatchPatterns.pos = getPosition(errorFieldMatchPatternNode); if (matchPattern.getKind() == NodeKind.NAMED_ARG_MATCH_PATTERN) { bLangErrorFieldMatchPatterns.addNamedArgMatchPattern( (org.ballerinalang.model.tree.matchpatterns.NamedArgMatchPatternNode) matchPattern); } else if (matchPattern.getKind() == NodeKind.REST_MATCH_PATTERN) { bLangErrorFieldMatchPatterns.restMatchPattern = (BLangRestMatchPattern) matchPattern; } return bLangErrorFieldMatchPatterns; } /* Binding-pattern counterpart of createErrorFieldMatchPattern. */ private BLangErrorFieldBindingPatterns createErrorFieldBindingPattern(Node errorFieldBindingPatternNode, BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns) { 
BLangBindingPattern bindingPattern = transformBindingPattern(errorFieldBindingPatternNode); bLangErrorFieldBindingPatterns.pos = getPosition(errorFieldBindingPatternNode); if (bindingPattern.getKind() == NodeKind.NAMED_ARG_BINDING_PATTERN) { bLangErrorFieldBindingPatterns. addNamedArgBindingPattern( (org.ballerinalang.model.tree.bindingpattern.NamedArgBindingPatternNode) bindingPattern); } else if (bindingPattern.getKind() == NodeKind.REST_BINDING_PATTERN) { bLangErrorFieldBindingPatterns.restBindingPattern = (BLangRestBindingPattern) bindingPattern; } return bLangErrorFieldBindingPatterns; } /* Collects all error field patterns from the given index onward into a single container on the error match pattern. */ private void createErrorFieldMatchPatterns(int index, ErrorMatchPatternNode errorMatchPatternNode, BLangErrorMatchPattern bLangErrorMatchPattern) { BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns = (BLangErrorFieldMatchPatterns) TreeBuilder.createErrorFieldMatchPattern(); for (int i = index; i < errorMatchPatternNode.argListMatchPatternNode().size(); i++) { Node errorFieldMatchPatternNode = errorMatchPatternNode.argListMatchPatternNode().get(i); bLangErrorMatchPattern.errorFieldMatchPatterns = createErrorFieldMatchPattern(errorFieldMatchPatternNode, bLangErrorFieldMatchPatterns); } } /* Binding-pattern counterpart of createErrorFieldMatchPatterns. */ private void createErrorFieldBindingPatterns(int index, ErrorBindingPatternNode errorBindingPatternNode, BLangErrorBindingPattern bLangErrorBindingPattern) { BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns = (BLangErrorFieldBindingPatterns) TreeBuilder.createErrorFieldBindingPattern(); for (int i = index; i < errorBindingPatternNode.argListBindingPatterns().size(); i++) { Node errorFieldBindingPatternNode = errorBindingPatternNode.argListBindingPatterns().get(i); bLangErrorBindingPattern.errorFieldBindingPatterns = createErrorFieldBindingPattern(errorFieldBindingPatternNode, bLangErrorFieldBindingPatterns); } } /* Folds a wildcard, const, or var-binding pattern into a BLangSimpleMatchPattern wrapper. */ private BLangSimpleMatchPattern createSimpleMatchPattern(BLangNode bLangNode) { BLangSimpleMatchPattern bLangSimpleMatchPattern = (BLangSimpleMatchPattern) 
TreeBuilder.createSimpleMatchPattern(); NodeKind kind = bLangNode.getKind(); switch (kind) { case WILDCARD_MATCH_PATTERN: bLangSimpleMatchPattern.wildCardMatchPattern = (BLangWildCardMatchPattern) bLangNode; break; case CONST_MATCH_PATTERN: bLangSimpleMatchPattern.constPattern = (BLangConstPattern) bLangNode; break; case VAR_BINDING_PATTERN_MATCH_PATTERN: bLangSimpleMatchPattern.varVariableName = (BLangVarBindingPatternMatchPattern) bLangNode; break; } return bLangSimpleMatchPattern; } private BLangCaptureBindingPattern createCaptureBindingPattern(CaptureBindingPatternNode captureBindingPatternNode) { BLangCaptureBindingPattern bLangCaptureBindingPattern = (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern(); bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPatternNode .variableName())); bLangCaptureBindingPattern.pos = getPosition(captureBindingPatternNode); return bLangCaptureBindingPattern; } private BLangSimpleBindingPattern createSimpleBindingPattern(BLangNode bLangNode) { BLangSimpleBindingPattern bLangSimpleBindingPattern = (BLangSimpleBindingPattern) TreeBuilder.createSimpleBindingPattern(); NodeKind kind = bLangNode.getKind(); switch (kind) { case WILDCARD_BINDING_PATTERN: bLangSimpleBindingPattern.wildCardBindingPattern = (BLangWildCardBindingPattern) bLangNode; break; case CAPTURE_BINDING_PATTERN: bLangSimpleBindingPattern.captureBindingPattern = (BLangCaptureBindingPattern) bLangNode; break; } return bLangSimpleBindingPattern; } private BLangXMLElementFilter createXMLElementFilter(Node node) { String ns = ""; String elementName = "*"; Location nsPos = null; Location elemNamePos = null; SyntaxKind kind = node.kind(); switch (kind) { case SIMPLE_NAME_REFERENCE: SimpleNameReferenceNode simpleNameReferenceNode = (SimpleNameReferenceNode) node; elementName = simpleNameReferenceNode.name().text(); elemNamePos = getPosition(simpleNameReferenceNode); break; case QUALIFIED_NAME_REFERENCE: QualifiedNameReferenceNode 
                    qualifiedNameReferenceNode = (QualifiedNameReferenceNode) node;
            elementName = qualifiedNameReferenceNode.identifier().text();
            elemNamePos = getPosition(qualifiedNameReferenceNode.identifier());
            ns = qualifiedNameReferenceNode.modulePrefix().text();
            nsPos = getPosition(qualifiedNameReferenceNode.modulePrefix());
            break;
        case XML_ATOMIC_NAME_PATTERN:
            XMLAtomicNamePatternNode atomicNamePatternNode = (XMLAtomicNamePatternNode) node;
            elementName = atomicNamePatternNode.name().text();
            elemNamePos = getPosition(atomicNamePatternNode.name());
            ns = atomicNamePatternNode.prefix().text();
            nsPos = getPosition(atomicNamePatternNode.prefix());
            break;
        case ASTERISK_TOKEN:
            // Wildcard filter: keep the default "*" name, only record the position.
            elemNamePos = getPosition(node);
    }
    // Quoted identifiers ('name) carry a leading single quote that must be stripped.
    if (stringStartsWithSingleQuote(ns)) {
        ns = ns.substring(1);
    }
    if (stringStartsWithSingleQuote(elementName)) {
        elementName = elementName.substring(1);
    }
    return new BLangXMLElementFilter(getPosition(node), null, ns, nsPos, elementName, elemNamePos);
}

// True when the string is non-empty and begins with a single quote
// (the quoted-identifier prefix).
private boolean stringStartsWithSingleQuote(String ns) {
    return ns != null && ns.length() > 0 && ns.charAt(0) == '\'';
}

// Re-assembles the textual form of a byte-array literal, e.g. `base16 `...``.
private String getValueFromByteArrayNode(ByteArrayLiteralNode byteArrayLiteralNode) {
    StringBuilder value = new StringBuilder();
    value.append(byteArrayLiteralNode.type().text());
    value.append(" ");
    value.append("`");
    if (byteArrayLiteralNode.content().isPresent()) {
        value.append(byteArrayLiteralNode.content().get().text());
    }
    value.append("`");
    return value.toString();
}

// Converts a mapping binding pattern `{a: x, b, ...rest}` into a BLangRecordVariable
// with key/value entries and an optional rest parameter.
// (Method continues on the next source line.)
private BLangRecordVariable createBLangRecordVariable(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
    List<BLangRecordVariableKeyValue> fieldBindingPatternsList = new ArrayList<>();
    for (BindingPatternNode node : mappingBindingPatternNode.fieldBindingPatterns()) {
        BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
        if (node instanceof FieldBindingPatternFullNode) {
            FieldBindingPatternFullNode fullNode =
                    (FieldBindingPatternFullNode) node;
            // `key: bindingPattern` form.
            recordKeyValue.key = createIdentifier(fullNode.variableName().name());
            recordKeyValue.valueBindingPattern = getBLangVariableNode(fullNode.bindingPattern());
        } else if (node instanceof FieldBindingPatternVarnameNode) {
            // Shorthand `varName` form: the key and the bound variable share the name.
            FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) node;
            recordKeyValue.key = createIdentifier(varnameNode.variableName().name());
            BLangSimpleVariable value = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
            value.pos = getPosition(varnameNode);
            IdentifierNode name = createIdentifier(varnameNode.variableName().name());
            ((BLangIdentifier) name).pos = value.pos;
            value.setName(name);
            recordKeyValue.valueBindingPattern = value;
        } else {
            // Rest binding pattern terminates the field list.
            recordVariable.restParam = getBLangVariableNode(node);
            break;
        }
        fieldBindingPatternsList.add(recordKeyValue);
    }
    recordVariable.variableList = fieldBindingPatternsList;
    recordVariable.pos = getPosition(mappingBindingPatternNode);
    return recordVariable;
}

// Creates an empty string literal node (used as a placeholder expression).
private BLangLiteral createEmptyLiteral() {
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    bLiteral.value = "";
    bLiteral.originalValue = "";
    bLiteral.setBType(symTable.getTypeFromTag(TypeTags.STRING));
    return bLiteral;
}

// Creates a simple variable node with the given name/positions and no type or initializer.
private BLangVariable createSimpleVariable(Location location, Token identifier, Location identifierPos) {
    BLangSimpleVariable memberVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    memberVar.pos = location;
    IdentifierNode name = createIdentifier(identifierPos, identifier);
    ((BLangIdentifier) name).pos = identifierPos;
    memberVar.setName(name);
    return memberVar;
}

// Dispatches a binding pattern to the matching BLangVariable shape: record, tuple,
// error, rest, wildcard, or simple capture variable.
// (Method continues on the next source line.)
private BLangVariable getBLangVariableNode(BindingPatternNode bindingPattern) {
    Token varName;
    switch (bindingPattern.kind()) {
        case MAPPING_BINDING_PATTERN:
            MappingBindingPatternNode mappingBindingPatternNode = (MappingBindingPatternNode) bindingPattern;
            return createBLangRecordVariable(mappingBindingPatternNode);
        case LIST_BINDING_PATTERN:
            ListBindingPatternNode
                    listBindingPatternNode = (ListBindingPatternNode) bindingPattern;
            BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
            tupleVariable.pos = getPosition(listBindingPatternNode);
            for (BindingPatternNode memberBindingPattern : listBindingPatternNode.bindingPatterns()) {
                if (memberBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
                    tupleVariable.restVariable = getBLangVariableNode(memberBindingPattern);
                } else {
                    BLangVariable member = getBLangVariableNode(memberBindingPattern);
                    tupleVariable.memberVariables.add(member);
                }
            }
            return tupleVariable;
        case ERROR_BINDING_PATTERN:
            ErrorBindingPatternNode errorBindingPatternNode = (ErrorBindingPatternNode) bindingPattern;
            BLangErrorVariable bLangErrorVariable = (BLangErrorVariable) TreeBuilder.createErrorVariableNode();
            bLangErrorVariable.pos = getPosition(errorBindingPatternNode);
            Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
            if (errorTypeRef.isPresent()) {
                bLangErrorVariable.typeNode = createTypeNode(errorTypeRef.get());
            }
            SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                    errorBindingPatternNode.argListBindingPatterns();
            int numberOfArgs = argListBindingPatterns.size();
            List<BLangErrorVariable.BLangErrorDetailEntry> namedArgs = new ArrayList<>();
            for (int position = 0; position < numberOfArgs; position++) {
                BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
                switch (bindingPatternNode.kind()) {
                    case CAPTURE_BINDING_PATTERN:
                    case WILDCARD_BINDING_PATTERN:
                        // First positional arg is the error message; a later capture
                        // falls through (no break) and is treated as the cause.
                        if (position == 0) {
                            bLangErrorVariable.message =
                                    (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                            break;
                        }
                        // fall through
                    case ERROR_BINDING_PATTERN:
                        bLangErrorVariable.cause = getBLangVariableNode(bindingPatternNode);
                        break;
                    case NAMED_ARG_BINDING_PATTERN:
                        NamedArgBindingPatternNode namedArgBindingPatternNode =
                                (NamedArgBindingPatternNode) bindingPatternNode;
                        BLangIdentifier key =
                                createIdentifier(namedArgBindingPatternNode.argName());
                        BLangVariable
                                valueBindingPattern =
                                getBLangVariableNode(namedArgBindingPatternNode.bindingPattern());
                        BLangErrorVariable.BLangErrorDetailEntry detailEntry =
                                new BLangErrorVariable.BLangErrorDetailEntry(key, valueBindingPattern);
                        namedArgs.add(detailEntry);
                        break;
                    default:
                        // REST_BINDING_PATTERN: collects the remaining detail fields.
                        bLangErrorVariable.restDetail =
                                (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                }
            }
            bLangErrorVariable.detail = namedArgs;
            return bLangErrorVariable;
        case REST_BINDING_PATTERN:
            RestBindingPatternNode restBindingPatternNode = (RestBindingPatternNode) bindingPattern;
            varName = restBindingPatternNode.variableName().name();
            break;
        case WILDCARD_BINDING_PATTERN:
            WildcardBindingPatternNode wildcardBindingPatternNode = (WildcardBindingPatternNode) bindingPattern;
            varName = wildcardBindingPatternNode.underscoreToken();
            break;
        case CAPTURE_BINDING_PATTERN:
        default:
            CaptureBindingPatternNode captureBindingPatternNode = (CaptureBindingPatternNode) bindingPattern;
            varName = captureBindingPatternNode.variableName();
            break;
    }
    // Rest/wildcard/capture cases share the simple-variable construction below.
    Location pos = getPosition(bindingPattern);
    return createSimpleVariable(pos, varName, getPosition(varName));
}

// Creates a BLangValueType node of the given kind at the given position.
BLangValueType addValueType(Location pos, TypeKind typeKind) {
    BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    typeNode.pos = pos;
    typeNode.typeKind = typeKind;
    return typeNode;
}

// Transforms a list of statement syntax nodes into BLang statements.
private List<BLangStatement> generateBLangStatements(NodeList<StatementNode> statementNodes) {
    List<BLangStatement> statements = new ArrayList<>();
    return generateAndAddBLangStatements(statementNodes, statements);
}

// Appends the transformed statements to {@code statements}; fork statements expand
// into multiple statements and are handled separately.
private List<BLangStatement> generateAndAddBLangStatements(NodeList<StatementNode> statementNodes,
                                                           List<BLangStatement> statements) {
    for (StatementNode statement : statementNodes) {
        if (statement != null) {
            if (statement.kind() == SyntaxKind.FORK_STATEMENT) {
                generateForkStatements(statements, (ForkStatementNode) statement);
                continue;
            }
            statements.add((BLangStatement) statement.apply(this));
        }
    }
    return statements;
}

private String
        extractVersion(SeparatedNodeList<Token> versionNumbers) {
    // Joins version-number tokens with "." (e.g. [1, 2, 3] -> "1.2.3").
    StringBuilder version = new StringBuilder();
    int size = versionNumbers.size();
    for (int i = 0; i < size; i++) {
        if (i != 0) {
            version.append(".");
        }
        version.append(versionNumbers.get(i).text());
    }
    return version.toString();
}

// Expands a fork statement into the list of statements: one variable definition per
// named worker (flagged FORKED, tagged with a shared anonymous fork name), any
// additional statements produced while transforming each worker, then the fork-join
// node itself.
private void generateForkStatements(List<BLangStatement> statements, ForkStatementNode forkStatementNode) {
    BLangForkJoin forkJoin = (BLangForkJoin) forkStatementNode.apply(this);
    String nextAnonymousForkKey = anonymousModelHelper.getNextAnonymousForkKey(packageID);
    for (NamedWorkerDeclarationNode workerDeclarationNode : forkStatementNode.namedWorkerDeclarations()) {
        BLangSimpleVariableDef workerDef = (BLangSimpleVariableDef) workerDeclarationNode.apply(this);
        workerDef.isWorker = true;
        workerDef.isInFork = true;
        workerDef.var.flagSet.add(Flag.FORKED);
        BLangFunction function = ((BLangLambdaFunction) workerDef.var.expr).function;
        function.addFlag(Flag.FORKED);
        function.anonForkName = nextAnonymousForkKey;
        statements.add(workerDef);
        // Drain statements queued as a side effect of transforming this worker.
        while (!this.additionalStatements.empty()) {
            statements.add(additionalStatements.pop());
        }
        forkJoin.addWorkers(workerDef);
    }
    statements.add(forkJoin);
}

// Wraps an expression in a `check` expression node.
private BLangCheckedExpr createCheckExpr(Location pos, BLangExpression expr) {
    BLangCheckedExpr checkedExpr = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
    checkedExpr.pos = pos;
    checkedExpr.expr = expr;
    return checkedExpr;
}

// Wraps an expression in a `checkpanic` expression node.
private BLangCheckPanickedExpr createCheckPanickedExpr(Location pos, BLangExpression expr) {
    BLangCheckPanickedExpr checkPanickedExpr =
            (BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
    checkPanickedExpr.pos = pos;
    checkPanickedExpr.expr = expr;
    return checkPanickedExpr;
}

// Populates parameters and return type of a function from its signature node;
// a missing return type descriptor defaults to nil.
// (Method continues on the next source line.)
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
    for (ParameterNode child : funcSignature.parameters()) {
        SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
        if (child instanceof RestParameterNode) {
            bLFunction.setRestParameter(param);
        } else {
            bLFunction.addParameter(param);
        }
    }

    Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = retNode.get();
        bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
        bLFunction.returnTypeAnnAttachments = applyAll(returnType.annotations());
    } else {
        // No declared return type: default to nil.
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = symTable.builtinPos;
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }
}

// Creates a unary expression node with the given operator and operand.
private BLangUnaryExpr createBLangUnaryExpr(Location location,
                                            OperatorKind operatorKind,
                                            BLangExpression expr) {
    BLangUnaryExpr bLUnaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    bLUnaryExpr.pos = location;
    bLUnaryExpr.operator = operatorKind;
    bLUnaryExpr.expr = expr;
    return bLUnaryExpr;
}

// Transforms a syntax node into an expression. An async send action is not a valid
// expression: an error is logged and a missing-identifier reference is substituted
// so transformation can continue.
private BLangExpression createExpression(Node expression) {
    if (expression.kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        dlog.error(getPosition(expression), DiagnosticErrorCode.ASYNC_SEND_NOT_YET_SUPPORTED_AS_EXPRESSION);
        Token missingIdentifier = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
        expression = NodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    }
    return (BLangExpression) createActionOrExpression(expression);
}

// Transforms a syntax node into the appropriate BLang node: literal, variable
// reference, group expression, type access, or (fallback) generic transform.
// (Method continues on the next source line.)
private BLangNode createActionOrExpression(Node actionOrExpression) {
    if (isSimpleLiteral(actionOrExpression.kind())) {
        return createSimpleLiteral(actionOrExpression);
    } else if (actionOrExpression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            actionOrExpression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            actionOrExpression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        BLangNameReference nameReference = createBLangNameReference(actionOrExpression);
        BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        bLVarRef.pos =
                getPosition(actionOrExpression);
        bLVarRef.pkgAlias = this.createIdentifier((Location) nameReference.pkgAlias.getPosition(),
                nameReference.pkgAlias.getValue());
        bLVarRef.variableName = this.createIdentifier((Location) nameReference.name.getPosition(),
                nameReference.name.getValue());
        return bLVarRef;
    } else if (actionOrExpression.kind() == SyntaxKind.BRACED_EXPRESSION) {
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = (BLangExpression) actionOrExpression.apply(this);
        group.pos = getPosition(actionOrExpression);
        return group;
    } else if (isType(actionOrExpression.kind())) {
        // A bare type in expression position becomes a typedesc access expression.
        BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
        typeAccessExpr.pos = getPosition(actionOrExpression);
        typeAccessExpr.typeNode = createTypeNode(actionOrExpression);
        return typeAccessExpr;
    } else {
        return actionOrExpression.apply(this);
    }
}

// Builds a string template literal from its member nodes; an empty template gets a
// single empty-string literal so the expression list is never empty.
private BLangNode createStringTemplateLiteral(NodeList<Node> memberNodes, Location location) {
    BLangStringTemplateLiteral stringTemplateLiteral =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    for (Node memberNode : memberNodes) {
        stringTemplateLiteral.exprs.add((BLangExpression) memberNode.apply(this));
    }

    if (stringTemplateLiteral.exprs.isEmpty()) {
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = location;
        stringTemplateLiteral.exprs.add(emptyLiteral);
    }

    stringTemplateLiteral.pos = location;
    return stringTemplateLiteral;
}

// Builds a raw template literal, inserting empty string parts so that the strings
// list always brackets every interpolation (leading, between adjacent
// interpolations, and trailing). (Method continues on the next source line.)
private BLangRawTemplateLiteral createRawTemplateLiteral(NodeList<Node> members, Location location) {
    BLangRawTemplateLiteral literal = (BLangRawTemplateLiteral) TreeBuilder.createRawTemplateLiteralNode();
    literal.pos = location;

    boolean prevNodeWasInterpolation = false;
    Node firstMember = members.isEmpty() ?
            null : members.get(0);
    // Leading interpolation needs an empty string part before it.
    if (firstMember != null && firstMember.kind() == SyntaxKind.INTERPOLATION) {
        literal.strings.add(createStringLiteral("", getPosition(firstMember)));
    }

    for (Node member : members) {
        if (member.kind() == SyntaxKind.INTERPOLATION) {
            literal.insertions.add((BLangExpression) member.apply(this));
            // Adjacent interpolations are separated by an empty string part.
            if (prevNodeWasInterpolation) {
                literal.strings.add(createStringLiteral("", getPosition(member)));
            }
            prevNodeWasInterpolation = true;
        } else {
            literal.strings.add((BLangLiteral) member.apply(this));
            prevNodeWasInterpolation = false;
        }
    }

    // Trailing interpolation needs an empty string part after it.
    if (prevNodeWasInterpolation) {
        literal.strings.add(createStringLiteral("", getPosition(members.get(members.size() - 1))));
    }

    return literal;
}

// Overload: name may be absent (e.g. an unnamed parameter).
private BLangSimpleVariable createSimpleVar(Optional<Token> name, Node type,
                                            NodeList<AnnotationNode> annotations) {
    if (name.isPresent()) {
        Token nameToken = name.get();
        return createSimpleVar(nameToken, type, null, null, annotations);
    }
    return createSimpleVar(null, type, null, null, annotations);
}

// Overload: no initializer and no visibility qualifier.
private BLangSimpleVariable createSimpleVar(Token name, Node type, NodeList<AnnotationNode> annotations) {
    return createSimpleVar(name, type, null, null, annotations);
}

// Full form: builds a simple variable with optional type (var-declared when the
// type is `var` or absent), visibility, initializer, and annotations.
// (Method continues on the next source line.)
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer,
                                            Token visibilityQualifier, NodeList<AnnotationNode> annotations) {
    BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    bLSimpleVar.setName(this.createIdentifier(name));
    bLSimpleVar.name.pos = getPosition(name);

    if (isDeclaredWithVar(typeName)) {
        bLSimpleVar.isDeclaredWithVar = true;
    } else {
        bLSimpleVar.setTypeNode(createTypeNode(typeName));
    }

    if (visibilityQualifier != null) {
        if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
            bLSimpleVar.flagSet.add(Flag.PRIVATE);
        } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            bLSimpleVar.flagSet.add(Flag.PUBLIC);
        }
    }

    if (initializer != null) {
        bLSimpleVar.setInitialExpression(createExpression(initializer));
    }

    if
            (annotations != null) {
        bLSimpleVar.annAttachments = applyAll(annotations);
    }
    return bLSimpleVar;
}

// A variable is var-declared when its type node is absent or the `var` keyword.
private boolean isDeclaredWithVar(Node typeNode) {
    if (typeNode == null || typeNode.kind() == SyntaxKind.VAR_TYPE_DESC) {
        return true;
    }
    return false;
}

// Overload: position is derived from the token.
private BLangIdentifier createIdentifier(Token token) {
    return createIdentifier(getPosition(token), token);
}

// Overload: a null/missing token (or a bare quoted-identifier prefix) is replaced
// with a generated missing-node name so downstream phases always see a name.
private BLangIdentifier createIdentifier(Location pos, Token token) {
    if (token == null) {
        return createIdentifier(pos, null, null);
    }

    String identifierName = token.text();
    if (token.isMissing() || identifierName.equals(IDENTIFIER_LITERAL_PREFIX)) {
        identifierName = missingNodesHelper.getNextMissingNodeName(packageID);
    }

    return createIdentifier(pos, identifierName);
}

// Overload: no whitespace set.
private BLangIdentifier createIdentifier(Location pos, String value) {
    return createIdentifier(pos, value, null);
}

// Core identifier factory: unescapes unicode code points and marks quoted
// identifiers ('name) as literal, keeping the original quoted text.
private BLangIdentifier createIdentifier(Location pos, String value, Set<Whitespace> ws) {
    BLangIdentifier bLIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    if (value == null) {
        return bLIdentifer;
    }

    if (value.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        bLIdentifer.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value.substring(1)));
        bLIdentifer.originalValue = value;
        bLIdentifer.setLiteral(true);
    } else {
        bLIdentifer.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value));
        bLIdentifer.setLiteral(false);
    }
    bLIdentifer.pos = pos;
    if (ws != null) {
        bLIdentifer.addWS(ws);
    }
    return bLIdentifer;
}

// Creates an empty string literal at the given position.
private BLangLiteral createEmptyStringLiteral(Location pos) {
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    bLiteral.pos = pos;
    bLiteral.setBType(symTable.stringType);
    bLiteral.value = "";
    bLiteral.originalValue = "";
    return bLiteral;
}

// Overload: not part of a finite type.
private BLangLiteral createSimpleLiteral(Node literal) {
    return createSimpleLiteral(literal, false);
}

// Overload: unwraps a unary sign (+/-) applied to the literal before delegating.
// (Method continues on the next source line.)
private BLangLiteral createSimpleLiteral(Node literal, boolean isFiniteType) {
    if (literal.kind() == SyntaxKind.UNARY_EXPRESSION) {
        UnaryExpressionNode
                unaryExpr = (UnaryExpressionNode) literal;
        BLangLiteral bLangLiteral =
                createSimpleLiteral(unaryExpr.expression(), unaryExpr.unaryOperator().kind(), isFiniteType);
        bLangLiteral.pos = getPosition(unaryExpr);
        return bLangLiteral;
    }

    return createSimpleLiteral(literal, SyntaxKind.NONE, isFiniteType);
}

// Core literal transform: classifies the literal token (int/byte, float/decimal,
// hex float, boolean, string/template/xml text, nil, null, byte array) into a
// typed BLangLiteral, applying an optional leading sign.
private BLangLiteral createSimpleLiteral(Node literal, SyntaxKind sign, boolean isFiniteType) {
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    SyntaxKind type = literal.kind();
    int typeTag = -1;
    Object value = null;
    String originalValue = null;

    String textValue;
    if (literal instanceof BasicLiteralNode) {
        textValue = ((BasicLiteralNode) literal).literalToken().text();
    } else if (literal instanceof Token) {
        textValue = ((Token) literal).text();
    } else {
        textValue = "";
    }

    // Fold the unwrapped unary sign back into the literal text.
    if (sign == SyntaxKind.PLUS_TOKEN) {
        textValue = "+" + textValue;
    } else if (sign == SyntaxKind.MINUS_TOKEN) {
        textValue = "-" + textValue;
    }

    if (type == SyntaxKind.NUMERIC_LITERAL) {
        SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
        if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
            typeTag = TypeTags.INT;
            value = getIntegerLiteral(literal, textValue, sign);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            // A hex int within 0..255 is narrowed to byte.
            if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN && withinByteRange(value)) {
                typeTag = TypeTags.BYTE;
            }
        } else if (literalTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN) {
            // A `d` suffix discriminates decimal from float.
            typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ?
                    TypeTags.DECIMAL : TypeTags.FLOAT;
            if (isFiniteType) {
                // Finite-type members drop the f/d suffix and any leading '+'.
                value = textValue.replaceAll("[fd+]", "");
                originalValue = textValue.replace("+", "");
            } else {
                value = textValue;
                originalValue = textValue;
            }
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else if (literalTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
            typeTag = TypeTags.FLOAT;
            value = getHexNodeValue(textValue);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        }
    } else if (type == SyntaxKind.BOOLEAN_LITERAL) {
        typeTag = TypeTags.BOOLEAN;
        value = Boolean.parseBoolean(textValue);
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
            type == SyntaxKind.TEMPLATE_STRING || type == SyntaxKind.IDENTIFIER_TOKEN) {
        String text = textValue;
        // Strip the surrounding double quotes (tolerating a missing close quote).
        if (type == SyntaxKind.STRING_LITERAL) {
            if (text.length() > 1 && text.charAt(text.length() - 1) == '"') {
                text = text.substring(1, text.length() - 1);
            } else {
                text = text.substring(1);
            }
        }
        String originalText = text;
        Location pos = getPosition(literal);

        // Validate each \u{...} escape and normalize it to a \uXXXX form; invalid
        // code points (surrogate range or beyond max) are reported with a location
        // computed from the escape's offset inside the literal.
        Matcher matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        int position = 0;
        while (matcher.find(position)) {
            String hexStringVal = matcher.group(1);
            int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
            if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
                    || hexDecimalVal > Constants.MAX_UNICODE) {
                String hexStringWithBraces = matcher.group(0);
                int offset = originalText.indexOf(hexStringWithBraces) + 1;
                dlog.error(new BLangDiagnosticLocation(currentCompUnitName,
                                pos.lineRange().startLine().line(),
                                pos.lineRange().endLine().line(),
                                pos.lineRange().startLine().offset() + offset,
                                pos.lineRange().startLine().offset() + offset + hexStringWithBraces.length()),
                        DiagnosticErrorCode.INVALID_UNICODE, hexStringWithBraces);
            }
            text = matcher.replaceFirst("\\\\u" +
                    fillWithZeros(hexStringVal));
            position = matcher.end() - 2;
            matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        }

        // Template strings and XML text keep their escapes verbatim; others are
        // Java-unescaped.
        if (type != SyntaxKind.TEMPLATE_STRING && type != SyntaxKind.XML_TEXT_CONTENT) {
            try {
                text = StringEscapeUtils.unescapeJava(text);
            } catch (Exception e) {
                dlog.error(pos, DiagnosticErrorCode.INVALID_UNICODE, originalText);
            }
        }

        typeTag = TypeTags.STRING;
        value = text;
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NIL_LITERAL) {
        originalValue = "()";
        typeTag = TypeTags.NIL;
        value = "()";
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    }  else if (type == SyntaxKind.NULL_LITERAL) {
        originalValue = "null";
        typeTag = TypeTags.NIL;
        value = "null";
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.BINARY_EXPRESSION) { // Should be base16 and base64
        typeTag = TypeTags.BYTE_ARRAY;
        value = textValue;
        originalValue = textValue;

        if (isNumericLiteral(type)) {
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else {
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        }
    } else if (type == SyntaxKind.BYTE_ARRAY_LITERAL) {
        return (BLangLiteral) literal.apply(this);
    }

    bLiteral.pos = getPosition(literal);
    bLiteral.setBType(symTable.getTypeFromTag(typeTag));
    bLiteral.getBType().tag = typeTag;
    bLiteral.value = value;
    bLiteral.originalValue = originalValue;
    return bLiteral;
}

// Creates a string literal node with the given value and position.
private BLangLiteral createStringLiteral(String value, Location pos) {
    BLangLiteral strLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    strLiteral.value = strLiteral.originalValue = value;
    strLiteral.setBType(symTable.stringType);
    strLiteral.pos = pos;
    return strLiteral;
}

// Transforms a type syntax node into a BLangType: built-in types, user-defined
// (possibly qualified) type references, or generic transform as fallback.
// (Method continues on the next source line.)
private BLangType createTypeNode(Node type) {
    if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        return createBuiltInTypeNode(type);
    } else if (type.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE || type.kind() ==
            SyntaxKind.IDENTIFIER_TOKEN) {
        BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        BLangNameReference nameReference = createBLangNameReference(type);
        bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
        bLUserDefinedType.pos = getPosition(type);
        return bLUserDefinedType;
    } else if (type.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A simple name reference with diagnostics is kept as a user-defined type
        // so the error location survives; otherwise recurse on the bare name token.
        if (type.hasDiagnostics()) {
            BLangUserDefinedType bLUserDefinedType =
                    (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
            BLangIdentifier pkgAlias = this.createIdentifier(null, "");
            BLangIdentifier name = this.createIdentifier(((SimpleNameReferenceNode) type).name());
            BLangNameReference nameReference = new BLangNameReference(getPosition(type), null, pkgAlias, name);
            bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
            bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
            bLUserDefinedType.pos = getPosition(type);
            return bLUserDefinedType;
        }
        SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
        return createTypeNode(nameReferenceNode.name());
    }
    return (BLangType) type.apply(this);
}

// Maps a built-in type syntax node to a value-type or built-in-ref-type node.
// `var` yields null (caller handles var-declared variables); a missing name is
// replaced with a generated missing-node identifier.
// (Method continues on the next source line.)
private BLangType createBuiltInTypeNode(Node type) {
    String typeText;
    if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        typeText = "()";
    } else if (type instanceof BuiltinSimpleNameReferenceNode) {
        BuiltinSimpleNameReferenceNode simpleNameRef = (BuiltinSimpleNameReferenceNode) type;
        if (simpleNameRef.kind() == SyntaxKind.VAR_TYPE_DESC) {
            return null;
        } else if (simpleNameRef.name().isMissing()) {
            String name = missingNodesHelper.getNextMissingNodeName(packageID);
            BLangIdentifier identifier = createIdentifier(getPosition(simpleNameRef.name()), name);
            BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
            return createUserDefinedType(getPosition(type), pkgAlias, identifier);
        }
        typeText = simpleNameRef.name().text();
    } else {
        typeText =
                ((Token) type).text(); // TODO: Remove this once map<string> returns Nodes for `map`
    }
    TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));

    SyntaxKind kind = type.kind();
    switch (kind) {
        // Simple value types become BLangValueType nodes...
        case BOOLEAN_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case ANY_TYPE_DESC:
        case NIL_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case READONLY_TYPE_DESC:
            BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            valueType.typeKind = typeKind;
            valueType.pos = getPosition(type);
            return valueType;
        default:
            // ...everything else is a built-in reference type (json, xml, etc.).
            BLangBuiltInRefTypeNode builtInValueType =
                    (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
            builtInValueType.typeKind = typeKind;
            builtInValueType.pos = getPosition(type);
            return builtInValueType;
    }
}

// Creates an untyped simple variable node with the given name and optional
// initializer expression.
private VariableNode createBasicVarNodeWithoutType(Location location, Set<Whitespace> ws,
                                                   String identifier, Location identifierLocation,
                                                   ExpressionNode expr) {
    BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    bLSimpleVar.pos = location;
    IdentifierNode name = this.createIdentifier(identifierLocation, identifier, ws);
    ((BLangIdentifier) name).pos = identifierLocation;
    bLSimpleVar.setName(name);
    bLSimpleVar.addWS(ws);
    if (expr != null) {
        bLSimpleVar.setInitialExpression(expr);
    }
    return bLSimpleVar;
}

// Builds an invocation node (action invocation when async) from a name reference
// and its argument list. (Method continues on the next source line.)
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
                                              Location position, boolean isAsync) {
    BLangInvocation bLInvocation;
    if (isAsync) {
        bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
    } else {
        bLInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    }
    BLangNameReference reference = createBLangNameReference(nameNode);
    bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    bLInvocation.name = (BLangIdentifier) reference.name;

    List<BLangExpression> args = new ArrayList<>();
    arguments.iterator().forEachRemaining(arg -> args.add(createExpression(arg)));
    bLInvocation.argExprs = args;
    bLInvocation.pos = position;
    return bLInvocation;
}

// Resolves a syntax node to a (pkgAlias, name) reference pair. Qualified references
// keep their module prefix; all other accepted kinds are reduced to a single token
// with an empty package alias.
private BLangNameReference createBLangNameReference(Node node) {
    switch (node.kind()) {
        case QUALIFIED_NAME_REFERENCE:
            QualifiedNameReferenceNode iNode = (QualifiedNameReferenceNode) node;
            Token modulePrefix = iNode.modulePrefix();
            IdentifierToken identifier = iNode.identifier();
            BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix);
            Location namePos = getPosition(identifier);
            BLangIdentifier name = this.createIdentifier(namePos, identifier);
            return new BLangNameReference(getPosition(node), null, pkgAlias, name);
        case ERROR_TYPE_DESC:
            node = ((BuiltinSimpleNameReferenceNode) node).name();
            break;
        case NEW_KEYWORD:
        case IDENTIFIER_TOKEN:
        case ERROR_KEYWORD:
            // Already a bare token.
            break;
        case SIMPLE_NAME_REFERENCE:
        default:
            node = ((SimpleNameReferenceNode) node).name();
            break;
    }

    Token iToken = (Token) node;
    BLangIdentifier pkgAlias = this.createIdentifier(symTable.builtinPos, "");
    BLangIdentifier name = this.createIdentifier(iToken);
    return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}

// Converts an optional markdown documentation node into a BLangMarkdownDocumentation
// attachment (description lines, parameter/return docs, references, deprecation).
// Returns null when no documentation is present.
// (Method continues on the next source line.)
private BLangMarkdownDocumentation createMarkdownDocumentationAttachment(Optional<Node>
                                                                                 markdownDocumentationNode) {
    if (markdownDocumentationNode == null || !markdownDocumentationNode.isPresent()) {
        return null;
    }
    BLangMarkdownDocumentation doc = (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();

    LinkedList<BLangMarkdownDocumentationLine> documentationLines = new LinkedList<>();
    LinkedList<BLangMarkdownParameterDocumentation> parameters = new LinkedList<>();
    LinkedList<BLangMarkdownReferenceDocumentation> references = new LinkedList<>();

    MarkdownDocumentationNode markdownDocNode = (MarkdownDocumentationNode) markdownDocumentationNode.get();
    NodeList<Node> docLineList = markdownDocNode.documentationLines();

    BLangMarkdownParameterDocumentation bLangParaDoc = null;
    BLangMarkdownReturnParameterDocumentation bLangReturnParaDoc = null;
BLangMarkDownDeprecationDocumentation bLangDeprecationDoc = null; BLangMarkDownDeprecatedParametersDocumentation bLangDeprecatedParaDoc = null; for (Node singleDocLine : docLineList) { switch (singleDocLine.kind()) { case MARKDOWN_DOCUMENTATION_LINE: case MARKDOWN_REFERENCE_DOCUMENTATION_LINE: MarkdownDocumentationLineNode docLineNode = (MarkdownDocumentationLineNode) singleDocLine; NodeList<Node> docElements = docLineNode.documentElements(); String docText = addReferencesAndReturnDocumentationText(references, docElements); if (bLangDeprecationDoc != null) { bLangDeprecationDoc.deprecationDocumentationLines.add(docText); } else if (bLangReturnParaDoc != null) { bLangReturnParaDoc.returnParameterDocumentationLines.add(docText); } else if (bLangParaDoc != null) { bLangParaDoc.parameterDocumentationLines.add(docText); } else { BLangMarkdownDocumentationLine bLangDocLine = (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode(); bLangDocLine.text = docText; bLangDocLine.pos = getPosition(docLineNode); documentationLines.add(bLangDocLine); } break; case MARKDOWN_PARAMETER_DOCUMENTATION_LINE: bLangParaDoc = new BLangMarkdownParameterDocumentation(); MarkdownParameterDocumentationLineNode parameterDocLineNode = (MarkdownParameterDocumentationLineNode) singleDocLine; BLangIdentifier paraName = new BLangIdentifier(); Token parameterName = parameterDocLineNode.parameterName(); String parameterNameValue = parameterName.isMissing() ? 
"" : IdentifierUtils.unescapeUnicodeCodepoints(parameterName.text()); if (stringStartsWithSingleQuote(parameterNameValue)) { parameterNameValue = parameterNameValue.substring(1); } paraName.value = parameterNameValue; bLangParaDoc.parameterName = paraName; NodeList<Node> paraDocElements = parameterDocLineNode.documentElements(); String paraDocText = addReferencesAndReturnDocumentationText(references, paraDocElements); bLangParaDoc.parameterDocumentationLines.add(paraDocText); bLangParaDoc.pos = getPosition(parameterName); if (bLangDeprecatedParaDoc != null) { bLangDeprecatedParaDoc.parameters.add(bLangParaDoc); } else if (bLangDeprecationDoc != null) { bLangDeprecatedParaDoc = new BLangMarkDownDeprecatedParametersDocumentation(); bLangDeprecatedParaDoc.parameters.add(bLangParaDoc); bLangDeprecationDoc = null; } else { parameters.add(bLangParaDoc); } break; case MARKDOWN_RETURN_PARAMETER_DOCUMENTATION_LINE: bLangReturnParaDoc = new BLangMarkdownReturnParameterDocumentation(); MarkdownParameterDocumentationLineNode returnParaDocLineNode = (MarkdownParameterDocumentationLineNode) singleDocLine; NodeList<Node> returnParaDocElements = returnParaDocLineNode.documentElements(); String returnParaDocText = addReferencesAndReturnDocumentationText(references, returnParaDocElements); bLangReturnParaDoc.returnParameterDocumentationLines.add(returnParaDocText); bLangReturnParaDoc.pos = getPosition(returnParaDocLineNode); doc.returnParameter = bLangReturnParaDoc; break; case MARKDOWN_DEPRECATION_DOCUMENTATION_LINE: bLangDeprecationDoc = new BLangMarkDownDeprecationDocumentation(); MarkdownDocumentationLineNode deprecationDocLineNode = (MarkdownDocumentationLineNode) singleDocLine; String lineText = ((Token) deprecationDocLineNode.documentElements().get(0)).text(); bLangDeprecationDoc.addDeprecationLine(" bLangDeprecationDoc.pos = getPosition(deprecationDocLineNode); break; case MARKDOWN_CODE_BLOCK: MarkdownCodeBlockNode codeBlockNode = (MarkdownCodeBlockNode) singleDocLine; 
transformCodeBlock(documentationLines, codeBlockNode); break; default: break; } } doc.documentationLines = documentationLines; doc.parameters = parameters; doc.references = references; doc.deprecationDocumentation = bLangDeprecationDoc; doc.deprecatedParametersDocumentation = bLangDeprecatedParaDoc; doc.pos = getPosition(markdownDocNode); return doc; } private void transformCodeBlock(LinkedList<BLangMarkdownDocumentationLine> documentationLines, MarkdownCodeBlockNode codeBlockNode) { BLangMarkdownDocumentationLine bLangDocLine = (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode(); StringBuilder docText = new StringBuilder(); if (codeBlockNode.langAttribute().isPresent()) { docText.append(codeBlockNode.startBacktick().text()); docText.append(codeBlockNode.langAttribute().get().toString()); } else { docText.append(codeBlockNode.startBacktick().toString()); } codeBlockNode.codeLines().forEach(codeLine -> docText.append(codeLine.codeDescription().toString())); docText.append(codeBlockNode.endBacktick().text()); bLangDocLine.text = docText.toString(); bLangDocLine.pos = getPosition(codeBlockNode.startLineHashToken()); documentationLines.add(bLangDocLine); } private String addReferencesAndReturnDocumentationText(LinkedList<BLangMarkdownReferenceDocumentation> references, NodeList<Node> docElements) { StringBuilder docText = new StringBuilder(); for (Node element : docElements) { if (element.kind() == SyntaxKind.BALLERINA_NAME_REFERENCE) { BLangMarkdownReferenceDocumentation bLangRefDoc = new BLangMarkdownReferenceDocumentation(); BallerinaNameReferenceNode balNameRefNode = (BallerinaNameReferenceNode) element; bLangRefDoc.pos = getPosition(balNameRefNode); Token startBacktick = balNameRefNode.startBacktick(); Node backtickContent = balNameRefNode.nameReference(); Token endBacktick = balNameRefNode.endBacktick(); String contentString = backtickContent.isMissing() ? 
"" : backtickContent.toString(); bLangRefDoc.referenceName = contentString; bLangRefDoc.type = DocumentationReferenceType.BACKTICK_CONTENT; Optional<Token> referenceType = balNameRefNode.referenceType(); referenceType.ifPresent( refType -> { bLangRefDoc.type = stringToRefType(refType.text()); docText.append(refType.toString()); } ); transformDocumentationBacktickContent(backtickContent, bLangRefDoc); docText.append(startBacktick.isMissing() ? "" : startBacktick.text()); docText.append(contentString); docText.append(endBacktick.isMissing() ? "" : endBacktick.text()); references.add(bLangRefDoc); } else if (element.kind() == SyntaxKind.DOCUMENTATION_DESCRIPTION) { Token docDescription = (Token) element; docText.append(docDescription.text()); } else if (element.kind() == SyntaxKind.INLINE_CODE_REFERENCE) { InlineCodeReferenceNode inlineCodeRefNode = (InlineCodeReferenceNode) element; docText.append(inlineCodeRefNode.startBacktick().text()); docText.append(inlineCodeRefNode.codeReference().text()); docText.append(inlineCodeRefNode.endBacktick().text()); } } return trimLeftAtMostOne(docText.toString()); } private String trimLeftAtMostOne(String text) { int countToStrip = 0; if (!text.isEmpty() && Character.isWhitespace(text.charAt(0))) { countToStrip = 1; } return text.substring(countToStrip); } private void transformDocumentationBacktickContent(Node backtickContent, BLangMarkdownReferenceDocumentation bLangRefDoc) { QualifiedNameReferenceNode qualifiedRef; SimpleNameReferenceNode simpleRef; switch (backtickContent.kind()) { case CODE_CONTENT: bLangRefDoc.hasParserWarnings = true; break; case QUALIFIED_NAME_REFERENCE: qualifiedRef = (QualifiedNameReferenceNode) backtickContent; bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text(); bLangRefDoc.identifier = qualifiedRef.identifier().text(); break; case SIMPLE_NAME_REFERENCE: simpleRef = (SimpleNameReferenceNode) backtickContent; bLangRefDoc.identifier = simpleRef.name().text(); break; case FUNCTION_CALL: Node 
funcName = (((FunctionCallExpressionNode) backtickContent).functionName());
                // Function reference: the callee may be module-qualified (mod:foo()) or plain (foo()).
                if (funcName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                    qualifiedRef = (QualifiedNameReferenceNode) funcName;
                    bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                    bLangRefDoc.identifier = qualifiedRef.identifier().text();
                } else {
                    simpleRef = (SimpleNameReferenceNode) funcName;
                    bLangRefDoc.identifier = simpleRef.name().text();
                }
                break;
            case METHOD_CALL:
                // Method reference (T.foo() or mod:T.foo()): capture the method name as the
                // identifier and the receiver as the type name (plus module qualifier, if any).
                MethodCallExpressionNode methodCallExprNode = (MethodCallExpressionNode) backtickContent;
                bLangRefDoc.identifier =
                        ((SimpleNameReferenceNode) methodCallExprNode.methodName()).name().text();
                Node refName = methodCallExprNode.expression();
                if (refName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                    qualifiedRef = (QualifiedNameReferenceNode) refName;
                    bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                    bLangRefDoc.typeName = qualifiedRef.identifier().text();
                } else {
                    simpleRef = (SimpleNameReferenceNode) refName;
                    bLangRefDoc.typeName = simpleRef.name().text();
                }
                break;
            default:
                throw new IllegalArgumentException("Invalid backtick content transformation");
        }
        if (bLangRefDoc.identifier != null) {
            bLangRefDoc.identifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.identifier);
            if (stringStartsWithSingleQuote(bLangRefDoc.identifier)) {
                // Strip the quoted-identifier prefix (') so the reference name is the bare identifier.
                bLangRefDoc.identifier = bLangRefDoc.identifier.substring(1);
            }
        }
        if (bLangRefDoc.qualifier != null) {
            bLangRefDoc.qualifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.qualifier);
            if (stringStartsWithSingleQuote(bLangRefDoc.qualifier)) {
                bLangRefDoc.qualifier = bLangRefDoc.qualifier.substring(1);
            }
        }
    }

    /**
     * Maps a documentation reference keyword (e.g. {@code type}, {@code function}) to its
     * {@link DocumentationReferenceType}. Any unrecognized keyword falls back to
     * {@code BACKTICK_CONTENT}.
     */
    private DocumentationReferenceType stringToRefType(String refTypeName) {
        switch (refTypeName) {
            case "type":
                return DocumentationReferenceType.TYPE;
            case "service":
                return DocumentationReferenceType.SERVICE;
            case "variable":
                return DocumentationReferenceType.VARIABLE;
            case "var":
                return DocumentationReferenceType.VAR;
            case "annotation":
                return DocumentationReferenceType.ANNOTATION;
            case "module":
                return DocumentationReferenceType.MODULE;
            case "function":
                return DocumentationReferenceType.FUNCTION;
            case "parameter":
                return DocumentationReferenceType.PARAMETER;
            case "const":
                return DocumentationReferenceType.CONST;
            default:
                return DocumentationReferenceType.BACKTICK_CONTENT;
        }
    }

    /**
     * Parses an integer literal node into a {@code Long} (or reports a diagnostic on overflow).
     * Decimal literals are parsed as-is; hex literals have the {@code 0x} prefix removed and are
     * parsed with radix 16. Returns {@code null} for any other literal token kind.
     */
    private Object getIntegerLiteral(Node literal, String nodeValue, SyntaxKind sign) {
        SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
        if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
            return parseLong(literal, nodeValue, nodeValue, 10, sign,
                    DiagnosticErrorCode.INTEGER_TOO_SMALL, DiagnosticErrorCode.INTEGER_TOO_LARGE);
        } else if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
            String processedNodeValue = nodeValue.toLowerCase().replace("0x", "");
            return parseLong(literal, nodeValue, processedNodeValue, 16, sign,
                    DiagnosticErrorCode.HEXADECIMAL_TOO_SMALL, DiagnosticErrorCode.HEXADECIMAL_TOO_LARGE);
        }
        return null;
    }

    /**
     * Attempts {@link Long#parseLong(String, int)}; on failure logs {@code code1} (too small) when
     * the literal is negated, otherwise {@code code2} (too large), and returns the original text.
     * For a negated literal the reported position is widened one column left to cover the sign.
     */
    private Object parseLong(Node literal, String originalNodeValue, String processedNodeValue,
                             int radix, SyntaxKind sign, DiagnosticCode code1, DiagnosticCode code2) {
        try {
            return Long.parseLong(processedNodeValue, radix);
        } catch (Exception e) {
            Location pos = getPosition(literal);
            if (sign == SyntaxKind.MINUS_TOKEN) {
                pos = new BLangDiagnosticLocation(pos.lineRange().filePath(),
                        pos.lineRange().startLine().line(),
                        pos.lineRange().endLine().line(),
                        pos.lineRange().startLine().offset() - 1,
                        pos.lineRange().endLine().offset());
                dlog.error(pos, code1, originalNodeValue);
            } else {
                dlog.error(pos, code2, originalNodeValue);
            }
        }
        return originalNodeValue;
    }

    // Ensures a hex floating-point literal carries an exponent part; "p0" is appended when the
    // value has neither 'p' nor 'P', so it can be parsed as a hexadecimal floating-point number.
    private String getHexNodeValue(String value) {
        if (!(value.contains("p") || value.contains("P"))) {
            value = value + "p0";
        }
        return value;
    }

    // Left-pads the given string with '0' characters up to a width of 4.
    private String fillWithZeros(String str) {
        while (str.length() < 4) {
            str = "0".concat(str);
        }
        return str;
    }

    private void markVariableWithFlag(BLangVariable variable, Flag
flag) {
        variable.flagSet.add(flag);
        // Binding patterns can be composite; propagate the flag recursively into every
        // member, rest, message, cause and detail variable so the whole pattern carries it.
        switch (variable.getKind()) {
            case TUPLE_VARIABLE:
                BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
                for (BLangVariable var : tupleVariable.memberVariables) {
                    markVariableWithFlag(var, flag);
                }
                if (tupleVariable.restVariable != null) {
                    markVariableWithFlag(tupleVariable.restVariable, flag);
                }
                break;
            case RECORD_VARIABLE:
                BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
                for (BLangRecordVariableKeyValue keyValue : recordVariable.variableList) {
                    markVariableWithFlag(keyValue.getValue(), flag);
                }
                if (recordVariable.restParam != null) {
                    markVariableWithFlag((BLangVariable) recordVariable.restParam, flag);
                }
                break;
            case ERROR_VARIABLE:
                BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
                BLangSimpleVariable message = errorVariable.message;
                if (message != null) {
                    markVariableWithFlag(message, flag);
                }
                BLangVariable cause = errorVariable.cause;
                if (cause != null) {
                    markVariableWithFlag(cause, flag);
                }
                errorVariable.detail.forEach(entry -> markVariableWithFlag(entry.valueBindingPattern, flag));
                if (errorVariable.restDetail != null) {
                    markVariableWithFlag(errorVariable.restDetail, flag);
                }
                break;
        }
    }

    // True when the syntax kind is a simple literal (string/numeric/boolean/nil/null).
    private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case STRING_LITERAL:
            case NUMERIC_LITERAL:
            case BOOLEAN_LITERAL:
            case NIL_LITERAL:
            case NULL_LITERAL:
                return true;
            default:
                return false;
        }
    }

    // True when the syntax kind denotes a type descriptor node.
    static boolean isType(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case RECORD_TYPE_DESC:
            case OBJECT_TYPE_DESC:
            case NIL_TYPE_DESC:
            case OPTIONAL_TYPE_DESC:
            case ARRAY_TYPE_DESC:
            case INT_TYPE_DESC:
            case BYTE_TYPE_DESC:
            case FLOAT_TYPE_DESC:
            case DECIMAL_TYPE_DESC:
            case STRING_TYPE_DESC:
            case BOOLEAN_TYPE_DESC:
            case XML_TYPE_DESC:
            case JSON_TYPE_DESC:
            case HANDLE_TYPE_DESC:
            case ANY_TYPE_DESC:
            case ANYDATA_TYPE_DESC:
            case NEVER_TYPE_DESC:
            case VAR_TYPE_DESC:
            case SERVICE_TYPE_DESC:
            case MAP_TYPE_DESC:
            case UNION_TYPE_DESC:
            case ERROR_TYPE_DESC:
            case STREAM_TYPE_DESC:
            case TABLE_TYPE_DESC:
            case FUNCTION_TYPE_DESC:
            case TUPLE_TYPE_DESC:
            case PARENTHESISED_TYPE_DESC:
            case READONLY_TYPE_DESC:
            case DISTINCT_TYPE_DESC:
            case INTERSECTION_TYPE_DESC:
            case SINGLETON_TYPE_DESC:
            case TYPE_REFERENCE_TYPE_DESC:
                return true;
            default:
                return false;
        }
    }

    private boolean isNumericLiteral(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case NUMERIC_LITERAL:
                return true;
            default:
                return false;
        }
    }

    // A node with kind NONE stands for an absent optional child.
    private boolean isPresent(Node node) {
        return node.kind() != SyntaxKind.NONE;
    }

    // A type is anonymous unless it appears directly under a type definition
    // (or a distinct-type wrapper thereof), where it gets the definition's name.
    private boolean checkIfAnonymous(Node node) {
        SyntaxKind parentKind = node.parent().kind();
        return parentKind != SyntaxKind.DISTINCT_TYPE_DESC && parentKind != SyntaxKind.TYPE_DEFINITION;
    }

    // Walks up the ancestor chain; true when any ancestor is a statement, i.e. the
    // node occurs inside a local (function body) context rather than at module level.
    private boolean ifInLocalContext(Node parent) {
        while (parent != null) {
            if (parent instanceof StatementNode) {
                return true;
            }
            parent = parent.parent();
        }
        return false;
    }

    /**
     * Hoists an inline record type to a generated public anonymous type definition at the
     * top level and returns a user-defined type node referring to it.
     */
    private BLangType createAnonymousRecordType(RecordTypeDescriptorNode recordTypeDescriptorNode,
                                                BLangRecordTypeNode recordTypeNode) {
        BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        Location pos = getPosition(recordTypeDescriptorNode);
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(this.packageID);
        IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos, genName, null);
        typeDef.setName(anonTypeGenName);
        typeDef.flagSet.add(Flag.PUBLIC);
        typeDef.flagSet.add(Flag.ANONYMOUS);
        typeDef.typeNode = recordTypeNode;
        typeDef.pos = pos;
        addToTop(typeDef);
        return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name);
    }

    // Builds a BLangUserDefinedType node for the given package alias and type name.
    private BLangUserDefinedType createUserDefinedType(Location pos, BLangIdentifier pkgAlias,
                                                       BLangIdentifier name) {
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        userDefinedType.pos = pos;
        userDefinedType.pkgAlias = pkgAlias;
        userDefinedType.typeName = name;
        return userDefinedType;
    }

    private boolean withinByteRange(Object num) {
        if (num instanceof Long) {
            return
(Long) num <= 255 && (Long) num >= 0; } return false; } private class SimpleVarBuilder { private BLangIdentifier name; private BLangType type; private boolean isDeclaredWithVar; private Set<Flag> flags = new HashSet<>(); private boolean isFinal; private ExpressionNode expr; private Location pos; public BLangSimpleVariable build() { BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode(); bLSimpleVar.setName(this.name); bLSimpleVar.setTypeNode(this.type); bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar; bLSimpleVar.setTypeNode(this.type); bLSimpleVar.flagSet.addAll(this.flags); if (this.isFinal) { markVariableWithFlag(bLSimpleVar, Flag.FINAL); } bLSimpleVar.setInitialExpression(this.expr); bLSimpleVar.pos = pos; return bLSimpleVar; } public SimpleVarBuilder with(String name) { this.name = createIdentifier(null, name); return this; } public SimpleVarBuilder with(String name, Location identifierPos) { this.name = createIdentifier(identifierPos, name); return this; } public SimpleVarBuilder with(Token token) { this.name = createIdentifier(token); return this; } public SimpleVarBuilder setTypeByNode(Node typeName) { this.isDeclaredWithVar = typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC; if (typeName == null) { return this; } this.type = createTypeNode(typeName); return this; } public SimpleVarBuilder setExpressionByNode(Node initExprNode) { this.expr = initExprNode != null ? 
createExpression(initExprNode) : null; return this; } public SimpleVarBuilder setExpression(ExpressionNode expression) { this.expr = expression; return this; } public SimpleVarBuilder isDeclaredWithVar() { this.isDeclaredWithVar = true; return this; } public SimpleVarBuilder isFinal() { this.isFinal = true; return this; } public SimpleVarBuilder isListenerVar() { this.flags.add(Flag.LISTENER); this.flags.add(Flag.FINAL); return this; } public SimpleVarBuilder setVisibility(Token visibilityQualifier) { if (visibilityQualifier != null) { if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) { this.flags.add(Flag.PRIVATE); } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) { this.flags.add(Flag.PUBLIC); } } return this; } public SimpleVarBuilder setFinal(boolean present) { this.isFinal = present; return this; } public SimpleVarBuilder setOptional(boolean present) { if (present) { this.flags.add(Flag.PUBLIC); } else { this.flags.remove(Flag.PUBLIC); } return this; } public SimpleVarBuilder setRequired(boolean present) { if (present) { this.flags.add(Flag.REQUIRED); } else { this.flags.remove(Flag.REQUIRED); } return this; } public SimpleVarBuilder isPublic() { this.flags.add(Flag.PUBLIC); return this; } public SimpleVarBuilder isWorkerVar() { this.flags.add(Flag.WORKER); return this; } public SimpleVarBuilder setPos(Location pos) { this.pos = pos; return this; } } private void addFinalQualifier(BLangSimpleVariable simpleVar) { simpleVar.flagSet.add(Flag.FINAL); } private void addToTop(TopLevelNode topLevelNode) { if (currentCompilationUnit != null) { currentCompilationUnit.addTopLevelNode(topLevelNode); } } private Location expandLeft(Location location, Location upTo) { assert location.lineRange().startLine().line() > upTo.lineRange().startLine().line() || (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() && location.lineRange().startLine().offset() >= upTo.lineRange().startLine().offset()); Location 
expandedLocation = new BLangDiagnosticLocation(location.lineRange().filePath(), upTo.lineRange().startLine().line(), location.lineRange().endLine().line(), upTo.lineRange().startLine().offset(), location.lineRange().endLine().offset()); return expandedLocation; } private Location trimLeft(Location location, Location upTo) { assert location.lineRange().startLine().line() < upTo.lineRange().startLine().line() || (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() && location.lineRange().startLine().offset() <= upTo.lineRange().startLine().offset()); Location trimmedLocation = new BLangDiagnosticLocation(location.lineRange().filePath(), upTo.lineRange().startLine().line(), location.lineRange().endLine().line(), upTo.lineRange().startLine().offset(), location.lineRange().endLine().offset()); return trimmedLocation; } private Location trimRight(Location location, Location upTo) { assert location.lineRange().endLine().line() > upTo.lineRange().endLine().line() || (location.lineRange().endLine().line() == upTo.lineRange().endLine().line() && location.lineRange().endLine().offset() >= upTo.lineRange().endLine().offset()); Location trimmedLocation = new BLangDiagnosticLocation(location.lineRange().filePath(), location.lineRange().startLine().line(), upTo.lineRange().endLine().line(), location.lineRange().startLine().offset(), upTo.lineRange().endLine().offset()); return trimmedLocation; } private void setClassQualifiers(NodeList<Token> qualifiers, BLangClassDefinition blangClass) { for (Token qualifier : qualifiers) { SyntaxKind kind = qualifier.kind(); switch (kind) { case DISTINCT_KEYWORD: blangClass.flagSet.add(Flag.DISTINCT); break; case CLIENT_KEYWORD: blangClass.flagSet.add(Flag.CLIENT); break; case READONLY_KEYWORD: blangClass.flagSet.add(Flag.READONLY); break; case SERVICE_KEYWORD: blangClass.flagSet.add(Flag.SERVICE); break; case ISOLATED_KEYWORD: blangClass.flagSet.add(Flag.ISOLATED); break; default: throw new RuntimeException("Syntax 
kind is not supported: " + kind); } } } }
class BLangNodeTransformer extends NodeTransformer<BLangNode> { private static final String IDENTIFIER_LITERAL_PREFIX = "'"; private BLangDiagnosticLog dlog; private SymbolTable symTable; private PackageCache packageCache; private PackageID packageID; private String currentCompUnitName; private BLangCompilationUnit currentCompilationUnit; private BLangAnonymousModelHelper anonymousModelHelper; private BLangMissingNodesHelper missingNodesHelper; /* To keep track of additional statements produced from multi-BLangNode resultant transformations */ private Stack<BLangStatement> additionalStatements = new Stack<>(); /* To keep track if we are inside a block statment for the use of type definition creation */ private boolean isInLocalContext = false; public BLangNodeTransformer(CompilerContext context, PackageID packageID, String entryName) { this.dlog = BLangDiagnosticLog.getInstance(context); this.dlog.setCurrentPackageId(packageID); this.symTable = SymbolTable.getInstance(context); this.packageID = packageID; this.currentCompUnitName = entryName; this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); } public List<org.ballerinalang.model.tree.Node> accept(Node node) { BLangNode bLangNode = node.apply(this); List<org.ballerinalang.model.tree.Node> nodes = new ArrayList<>(); while (!additionalStatements.empty()) { nodes.add(additionalStatements.pop()); } nodes.add(bLangNode); return nodes; } @Override public BLangNode transform(IdentifierToken identifierToken) { return this.createIdentifier(getPosition(identifierToken), identifierToken); } private Optional<Node> getDocumentationString(Optional<MetadataNode> metadataNode) { return metadataNode.map(MetadataNode::documentationString).orElse(null); } private NodeList<AnnotationNode> getAnnotations(Optional<MetadataNode> metadataNode) { return metadataNode.map(MetadataNode::annotations).orElse(null); } private Location 
getPosition(Node node) { if (node == null) { return null; } LineRange lineRange = node.lineRange(); LinePosition startPos = lineRange.startLine(); LinePosition endPos = lineRange.endLine(); return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(), startPos.offset(), endPos.offset()); } private Location getPosition(Node startNode, Node endNode) { if (startNode == null || endNode == null) { return null; } LinePosition startPos = startNode.lineRange().startLine(); LinePosition endPos = endNode.lineRange().endLine(); return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(), startPos.offset(), endPos.offset()); } private Location getPositionWithoutMetadata(Node node) { if (node == null) { return null; } LineRange nodeLineRange = node.lineRange(); NonTerminalNode nonTerminalNode = (NonTerminalNode) node; ChildNodeList children = nonTerminalNode.children(); LinePosition startPos; if (children.get(0).kind() == SyntaxKind.METADATA) { startPos = children.get(1).lineRange().startLine(); } else { startPos = nodeLineRange.startLine(); } LinePosition endPos = nodeLineRange.endLine(); return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(), startPos.offset(), endPos.offset()); } @Override public BLangNode transform(ModulePartNode modulePart) { BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit(); this.currentCompilationUnit = compilationUnit; compilationUnit.name = currentCompUnitName; compilationUnit.setPackageID(packageID); Location pos = getPosition(modulePart); for (ImportDeclarationNode importDecl : modulePart.imports()) { BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this); bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName()); compilationUnit.addTopLevelNode(bLangImport); } for (ModuleMemberDeclarationNode member : modulePart.members()) { compilationUnit.addTopLevelNode((TopLevelNode) 
member.apply(this)); } Location newLocation = new BLangDiagnosticLocation(pos.lineRange().filePath(), 0, 0, 0, 0); compilationUnit.pos = newLocation; compilationUnit.setPackageID(packageID); this.currentCompilationUnit = null; return compilationUnit; } @Override public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) { TypedBindingPatternNode typedBindingPattern = modVarDeclrNode.typedBindingPattern(); BindingPatternNode bindingPatternNode = typedBindingPattern.bindingPattern(); BLangVariable variable = getBLangVariableNode(bindingPatternNode); if (modVarDeclrNode.visibilityQualifier().isPresent()) { markVariableWithFlag(variable, Flag.PUBLIC); } initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), modVarDeclrNode.initializer(), modVarDeclrNode.qualifiers()); NodeList<AnnotationNode> annotations = getAnnotations(modVarDeclrNode.metadata()); if (annotations != null) { variable.annAttachments = applyAll(annotations); } variable.pos = getPositionWithoutMetadata(modVarDeclrNode); variable.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(modVarDeclrNode.metadata())); return variable; } @Override public BLangNode transform(ImportDeclarationNode importDeclaration) { ImportOrgNameNode orgNameNode = importDeclaration.orgName().orElse(null); Optional<ImportPrefixNode> prefixNode = importDeclaration.prefix(); Token prefix = prefixNode.isPresent() ? 
prefixNode.get().prefix() : null; Token orgName = null; if (orgNameNode != null) { orgName = orgNameNode.orgName(); } String version = null; List<BLangIdentifier> pkgNameComps = new ArrayList<>(); NodeList<IdentifierToken> names = importDeclaration.moduleName(); Location position = getPosition(importDeclaration); names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null))); BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode(); importDcl.pos = position; importDcl.pkgNameComps = pkgNameComps; importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName); importDcl.version = this.createIdentifier(null, version); importDcl.alias = (prefix != null) ? this.createIdentifier(getPosition(prefix), prefix) : pkgNameComps.get(pkgNameComps.size() - 1); return importDcl; } @Override public BLangNode transform(MethodDeclarationNode methodDeclarationNode) { BLangFunction bLFunction; if (methodDeclarationNode.relativeResourcePath().isEmpty()) { bLFunction = createFunctionNode(methodDeclarationNode.methodName(), methodDeclarationNode.qualifierList(), methodDeclarationNode.methodSignature(), null); } else { bLFunction = createResourceFunctionNode(methodDeclarationNode.methodName(), methodDeclarationNode.qualifierList(), methodDeclarationNode.relativeResourcePath(), methodDeclarationNode.methodSignature(), null); } bLFunction.annAttachments = applyAll(getAnnotations(methodDeclarationNode.metadata())); bLFunction.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(methodDeclarationNode.metadata())); bLFunction.pos = getPositionWithoutMetadata(methodDeclarationNode); return bLFunction; } @Override public BLangNode transform(ResourcePathParameterNode resourcePathParameterNode) { BLangSimpleVariable pathParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode(); pathParam.name = createIdentifier(resourcePathParameterNode.paramName()); BLangType 
typeNode = (BLangType) resourcePathParameterNode.typeDescriptor().apply(this); pathParam.pos = getPosition(resourcePathParameterNode); pathParam.annAttachments = applyAll(resourcePathParameterNode.annotations()); if (resourcePathParameterNode.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) { BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode(); arrayTypeNode.elemtype = typeNode; arrayTypeNode.dimensions = 1; typeNode = arrayTypeNode; } pathParam.typeNode = typeNode; return pathParam; } private BLangFunction createResourceFunctionNode(IdentifierToken accessorName, NodeList<Token> qualifierList, NodeList<Node> relativeResourcePath, FunctionSignatureNode methodSignature, FunctionBodyNode functionBody) { BLangResourceFunction bLFunction = (BLangResourceFunction) TreeBuilder.createResourceFunctionNode(); String resourceFuncName = calculateResourceFunctionName(accessorName, relativeResourcePath); BLangIdentifier name = createIdentifier(getPosition(accessorName), resourceFuncName); populateFunctionNode(name, qualifierList, methodSignature, functionBody, bLFunction); bLFunction.methodName = createIdentifier(accessorName); bLFunction.resourcePath = new ArrayList<>(); List<BLangSimpleVariable> params = new ArrayList<>(); for (Node pathSegment : relativeResourcePath) { switch (pathSegment.kind()) { case SLASH_TOKEN: continue; case RESOURCE_PATH_SEGMENT_PARAM: BLangSimpleVariable param = (BLangSimpleVariable) pathSegment.apply(this); params.add(param); bLFunction.addPathParam(param); bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "*")); break; case RESOURCE_PATH_REST_PARAM: BLangSimpleVariable restParam = (BLangSimpleVariable) pathSegment.apply(this); params.add(restParam); bLFunction.setRestPathParam(restParam); bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "**")); break; default: bLFunction.resourcePath.add(createIdentifier((Token) pathSegment)); break; } } bLFunction.getParameters().addAll(0, 
params); return bLFunction; } private String calculateResourceFunctionName(IdentifierToken accessorName, NodeList<Node> relativeResourcePath) { StringBuilder sb = new StringBuilder(); sb.append("$"); sb.append(createIdentifier(accessorName).getValue()); for (Node token : relativeResourcePath) { switch (token.kind()) { case SLASH_TOKEN: continue; case RESOURCE_PATH_SEGMENT_PARAM: sb.append("$*"); break; case RESOURCE_PATH_REST_PARAM: sb.append("$**"); break; default: sb.append("$"); String value = createIdentifier((Token) token).getValue(); sb.append(value); } } return sb.toString(); } @Override public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) { BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode(); Location pos = getPositionWithoutMetadata(constantDeclarationNode); Location identifierPos = getPosition(constantDeclarationNode.variableName()); constantNode.name = createIdentifier(identifierPos, constantDeclarationNode.variableName()); constantNode.expr = createExpression(constantDeclarationNode.initializer()); constantNode.pos = pos; if (constantDeclarationNode.typeDescriptor().isPresent()) { constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor().orElse(null)); } constantNode.annAttachments = applyAll(getAnnotations(constantDeclarationNode.metadata())); constantNode.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(constantDeclarationNode.metadata())); constantNode.flagSet.add(Flag.CONSTANT); if (constantDeclarationNode.visibilityQualifier().isPresent() && constantDeclarationNode.visibilityQualifier().orElse(null).kind() == SyntaxKind.PUBLIC_KEYWORD) { constantNode.flagSet.add(Flag.PUBLIC); } NodeKind nodeKind = constantNode.expr.getKind(); if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) { BLangLiteral literal = nodeKind == NodeKind.LITERAL ? 
(BLangLiteral) TreeBuilder.createLiteralExpression() : (BLangLiteral) TreeBuilder.createNumericLiteralExpression(); literal.setValue(((BLangLiteral) constantNode.expr).value); literal.setBType(constantNode.expr.getBType()); literal.isConstant = true; BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode(); finiteTypeNode.valueSpace.add(literal); BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName); typeDef.setName(anonTypeGenName); typeDef.flagSet.add(Flag.PUBLIC); typeDef.flagSet.add(Flag.ANONYMOUS); typeDef.typeNode = finiteTypeNode; typeDef.pos = pos; constantNode.associatedTypeDefinition = typeDef; } return constantNode; } public BLangNode transform(TypeDefinitionNode typeDefNode) { BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); BLangIdentifier identifierNode = this.createIdentifier(typeDefNode.typeName()); typeDef.setName(identifierNode); typeDef.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(typeDefNode.metadata())); typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor()); typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> { if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) { typeDef.flagSet.add(Flag.PUBLIC); } }); typeDef.pos = getPositionWithoutMetadata(typeDefNode); typeDef.annAttachments = applyAll(getAnnotations(typeDefNode.metadata())); return typeDef; } @Override private List<TypeDescriptorNode> flattenUnionType(UnionTypeDescriptorNode unionTypeDescriptorNode) { List<TypeDescriptorNode> list = new ArrayList<>(); flattenUnionType(list, unionTypeDescriptorNode); return list; } private void flattenUnionType(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescriptorNode) { if (typeDescriptorNode.kind() != 
SyntaxKind.UNION_TYPE_DESC) { list.add(typeDescriptorNode); return; } UnionTypeDescriptorNode unionTypeDescriptorNode = (UnionTypeDescriptorNode) typeDescriptorNode; updateListWithNonUnionTypes(list, unionTypeDescriptorNode.leftTypeDesc()); updateListWithNonUnionTypes(list, unionTypeDescriptorNode.rightTypeDesc()); } private void updateListWithNonUnionTypes(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescNode) { if (typeDescNode.kind() != SyntaxKind.UNION_TYPE_DESC) { list.add(typeDescNode); } else { flattenUnionType(list, typeDescNode); } } private <T> void reverseFlatMap(List<List<T>> listOfLists, List<T> result) { for (int i = listOfLists.size() - 1; i >= 0; i--) { result.addAll(listOfLists.get(i)); } } private BLangUserDefinedType deSugarTypeAsUserDefType(BLangType toIndirect) { BLangTypeDefinition bLTypeDef = createTypeDefinitionWithTypeNode(toIndirect); Location pos = toIndirect.pos; addToTop(bLTypeDef); return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), bLTypeDef.name); } private BLangTypeDefinition createTypeDefinitionWithTypeNode(BLangType toIndirect) { Location pos = toIndirect.pos; BLangTypeDefinition bLTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID); IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos, genName); bLTypeDef.setName(anonTypeGenName); bLTypeDef.flagSet.add(Flag.PUBLIC); bLTypeDef.flagSet.add(Flag.ANONYMOUS); bLTypeDef.typeNode = toIndirect; bLTypeDef.pos = pos; return bLTypeDef; } @Override public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) { BLangType typeNode = createTypeNode(parenthesisedTypeDescriptorNode.typedesc()); typeNode.grouped = true; return typeNode; } @Override public BLangNode transform(TypeParameterNode typeParameterNode) { return createTypeNode(typeParameterNode.typeNode()); } @Override public BLangNode 
transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
    // (signature continued from the previous line) Builds a tuple type; a trailing
    // REST_TYPE member becomes the tuple's rest parameter type.
    BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
    SeparatedNodeList<Node> types = tupleTypeDescriptorNode.memberTypeDesc();
    for (int i = 0; i < types.size(); i++) {
        Node node = types.get(i);
        if (node.kind() == SyntaxKind.REST_TYPE) {
            RestDescriptorNode restDescriptor = (RestDescriptorNode) node;
            tupleTypeNode.restParamType = createTypeNode(restDescriptor.typeDescriptor());
        } else {
            tupleTypeNode.memberTypeNodes.add(createTypeNode(node));
        }
    }
    tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode);
    return tupleTypeNode;
}

/**
 * Transforms parameterized built-in types (typedesc/future/xml/error). Error types take a
 * separate path; the others become a built-in ref type, optionally wrapped in a constrained
 * type when a type parameter is present.
 */
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    if (parameterizedTypeDescNode.kind() == SyntaxKind.ERROR_TYPE_DESC) {
        return transformErrorTypeDescriptor(parameterizedTypeDescNode);
    }
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = getParameterizedTypeKind(parameterizedTypeDescNode.kind());
    refType.pos = getPosition(parameterizedTypeDescNode);
    Optional<TypeParameterNode> typeParam = parameterizedTypeDescNode.typeParamNode();
    if (typeParam.isPresent()) {
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = createTypeNode(typeParam.get().typeNode());
        constrainedType.pos = refType.pos;
        return constrainedType;
    }
    return refType;
}

// Maps a parameterized-type syntax kind to its semantic TypeKind; anything unmatched
// (including XML_TYPE_DESC) falls through to XML.
private TypeKind getParameterizedTypeKind(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case TYPEDESC_TYPE_DESC:
            return TypeKind.TYPEDESC;
        case FUTURE_TYPE_DESC:
            return TypeKind.FUTURE;
        case XML_TYPE_DESC:
        default:
            return TypeKind.XML;
    }
}

// Builds an error type node; distinct errors and non-local parameterized errors outside a
// type definition are hoisted into an anonymous type definition.
private BLangNode transformErrorTypeDescriptor(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    Optional<TypeParameterNode> typeParam = parameterizedTypeDescNode.typeParamNode();
    errorType.pos = getPosition(parameterizedTypeDescNode);
    if (typeParam.isPresent()) {
        TypeParameterNode typeNode = typeParam.get();
        errorType.detailType = createTypeNode(typeNode);
    }
    NonTerminalNode parent = parameterizedTypeDescNode.parent();
    boolean isDistinctError = parent.kind() == SyntaxKind.DISTINCT_TYPE_DESC;
    if (isDistinctError) {
        // Look through the distinct wrapper to find the real enclosing construct.
        parent = parent.parent();
    }
    errorType.isAnonymous = checkIfAnonymous(parameterizedTypeDescNode);
    errorType.isLocal = this.isInLocalContext;
    if (parent.kind() != SyntaxKind.TYPE_DEFINITION
            && (isDistinctError || (!errorType.isLocal && typeParam.isPresent()))) {
        return deSugarTypeAsUserDefType(errorType);
    }
    return errorType;
}

// True when the type parameter wraps a record/object/error descriptor that is itself anonymous.
private boolean isAnonymousTypeNode(TypeParameterNode typeNode) {
    SyntaxKind paramKind = typeNode.typeNode().kind();
    if (paramKind == SyntaxKind.RECORD_TYPE_DESC || paramKind == SyntaxKind.OBJECT_TYPE_DESC
            || paramKind == SyntaxKind.ERROR_TYPE_DESC) {
        return checkIfAnonymous(typeNode);
    }
    return false;
}

@Override
public BLangNode transform(DistinctTypeDescriptorNode distinctTypeDesc) {
    BLangType typeNode = createTypeNode(distinctTypeDesc.typeDescriptor());
    typeNode.flagSet.add(Flag.DISTINCT);
    return typeNode;
}

/**
 * Transforms an object type descriptor: applies qualifiers (client/service/isolated), sorts
 * members into init function, methods, fields and type references, trims the reported
 * position to the member span, and hoists anonymous object types into a type definition.
 */
@Override
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
    for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            objectTypeNode.flagSet.add(Flag.CLIENT);
            continue;
        }
        if (kind == SyntaxKind.SERVICE_KEYWORD) {
            objectTypeNode.flagSet.add(SERVICE);
            continue;
        }
        if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            objectTypeNode.flagSet.add(ISOLATED);
            continue;
        }
        throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
    NodeList<Node> members = objTypeDescNode.members();
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // Only the first init becomes the object's init function; duplicates are
                // added as ordinary methods (later phases report the redeclaration).
                if (objectTypeNode.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    objectTypeNode.initFunction = bLangFunction;
                } else {
                    objectTypeNode.addFunction(bLangFunction);
                }
            } else {
                objectTypeNode.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            // Resource functions are kept in the tree but flagged as an error here.
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            objectTypeNode.addFunction(bLangFunction);
            dlog.error(getPosition(node), DiagnosticErrorCode.OBJECT_TYPE_DEF_DOES_NOT_ALLOW_RESOURCE_FUNC_DECL);
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            objectTypeNode.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            objectTypeNode.addTypeReference((BLangType) bLangNode);
        }
    }
    objectTypeNode.pos = getPosition(objTypeDescNode);
    if (members.size() > 0) {
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(members.get(0)));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(members.get(members.size() - 1)));
    } else {
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(objTypeDescNode.closeBrace()));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(objTypeDescNode.openBrace()));
    }
    boolean isAnonymous = checkIfAnonymous(objTypeDescNode);
    objectTypeNode.isAnonymous = isAnonymous;
    if (!isAnonymous) {
        return objectTypeNode;
    }
    return deSugarTypeAsUserDefType(objectTypeNode);
}

// Builds the anonymous class definition backing an object-constructor expression.
public BLangClassDefinition transformObjectCtorExpressionBody(NodeList<Node> members) {
    BLangClassDefinition classDefinition = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    classDefinition.flagSet.add(Flag.ANONYMOUS);
    classDefinition.flagSet.add(Flag.OBJECT_CTOR);
    for (Node node : // continues on the next source line
members) {
    // (loop continued from the previous line) Classify each member of the object
    // constructor body into init function / method / field; type references are rejected.
    BLangNode bLangNode = node.apply(this);
    NodeKind nodeKind = bLangNode.getKind();
    if (nodeKind == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
        BLangFunction bLangFunction = (BLangFunction) bLangNode;
        bLangFunction.attachedFunction = true;
        bLangFunction.flagSet.add(Flag.ATTACHED);
        if (!Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
            classDefinition.addFunction(bLangFunction);
            continue;
        }
        if (classDefinition.initFunction != null) {
            // A second init is kept as an ordinary method; later phases diagnose it.
            classDefinition.addFunction(bLangFunction);
            continue;
        }
        if (bLangFunction.requiredParams.size() != 0) {
            dlog.error(bLangFunction.pos, DiagnosticErrorCode.OBJECT_CTOR_INIT_CANNOT_HAVE_PARAMETERS);
            continue;
        }
        bLangFunction.objInitFunction = true;
        classDefinition.initFunction = bLangFunction;
    } else if (nodeKind == NodeKind.VARIABLE) {
        classDefinition.addField((BLangSimpleVariable) bLangNode);
    } else if (nodeKind == NodeKind.USER_DEFINED_TYPE) {
        dlog.error(bLangNode.pos, DiagnosticErrorCode.OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS);
    }
}
classDefinition.internal = true;
return classDefinition;
}

/**
 * Object constructor expression creates a class definition for the type defined through the object constructor.
 * Then add the class definition as a top level node. Using the class definition initialize the object defined in
 * the object constructor. Therefore this can be considered as a desugar.
 * example:
 * var objVariable = object { int n; };
 *
 * class anonType0 { int n; }
 * var objVariable = new anonType0();
 *
 * @param objectConstructorExpressionNode object ctor expression node
 * @return BLangTypeInit node which initialize the class definition
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
    Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
    BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
    anonClass.pos = pos;
    BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
    objectCtorExpression.pos = pos;
    objectCtorExpression.classNode = anonClass;

    // Give the synthesized class a unique generated name and hoist it to the top level.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClass.setName(anonTypeGenName);
    anonClass.flagSet.add(Flag.PUBLIC);

    Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
    typeReference.ifPresent(typeReferenceNode -> {
        objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
    });

    anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
    addToTop(anonClass);

    NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
    for (Token qualifier : objectConstructorQualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            anonClass.flagSet.add(Flag.CLIENT);
            objectCtorExpression.isClient = true;
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            anonClass.flagSet.add(Flag.ISOLATED);
        } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
            anonClass.flagSet.add(SERVICE);
            objectCtorExpression.isService = true;
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }

    // Synthesize `new anonTypeN()` targeting the generated class.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);

    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;

    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;

    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    objectCtorExpression.typeInit = initNode;
    return objectCtorExpression;
}

/**
 * Transforms an object field into a simple variable, carrying docs, annotations, and the
 * final/resource qualifiers.
 */
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
            objFieldNode.expression().orElse(null), objFieldNode.visibilityQualifier().orElse(null),
            getAnnotations(objFieldNode.metadata()));
    Optional<Node> doc = getDocumentationString(objFieldNode.metadata());
    simpleVar.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
    NodeList<Token> qualifierList = objFieldNode.qualifierList();
    for (Token token : qualifierList) {
        if (token.kind() == SyntaxKind.FINAL_KEYWORD) {
            addFinalQualifier(simpleVar);
        } else if (token.kind() == SyntaxKind.RESOURCE_KEYWORD) {
            addResourceQualifier(simpleVar);
        }
    }
    simpleVar.flagSet.add(Flag.FIELD);
    simpleVar.pos = getPositionWithoutMetadata(objFieldNode);
    return simpleVar;
}

private void addResourceQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.RESOURCE);
}

@Override
public BLangNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
    BLangExprFunctionBody bLExprFunctionBody = (BLangExprFunctionBody) TreeBuilder.createExprFunctionBodyNode();
    bLExprFunctionBody.expr = // continues on the next source line
createExpression(expressionFunctionBodyNode.expression());
    // (continued from the previous line: completes transform(ExpressionFunctionBodyNode))
    bLExprFunctionBody.pos = getPosition(expressionFunctionBodyNode);
    return bLExprFunctionBody;
}

/**
 * Transforms a record type descriptor: collects fields (with their docs), field defaults,
 * included type references and the rest descriptor; computes sealedness; anonymous non-local
 * records are hoisted into a generated type definition.
 */
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    boolean hasRestField = false;
    boolean isAnonymous = checkIfAnonymous(recordTypeDescriptorNode);

    for (Node field : recordTypeDescriptorNode.fields()) {
        if (field.kind() == SyntaxKind.RECORD_FIELD) {
            BLangSimpleVariable bLFiled = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldNode) field).metadata());
            bLFiled.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(bLFiled);
        } else if (field.kind() == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
            BLangSimpleVariable bLFiled = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldWithDefaultValueNode) field).metadata());
            bLFiled.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(bLFiled);
        } else {
            // Any other member is a record type inclusion.
            recordTypeNode.addTypeReference(createTypeNode(field));
        }
    }

    Optional<RecordRestDescriptorNode> recordRestDesc = recordTypeDescriptorNode.recordRestDescriptor();
    if (recordRestDesc.isPresent()) {
        recordTypeNode.restFieldType = createTypeNode(recordRestDesc.get());
        hasRestField = true;
    }
    // `{` starts an open record; `{|` a closed one. Sealed = closed and no rest field.
    boolean isOpen = recordTypeDescriptorNode.bodyStartDelimiter().kind() == SyntaxKind.OPEN_BRACE_TOKEN;
    recordTypeNode.sealed = !(hasRestField || isOpen);
    recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
    recordTypeNode.isAnonymous = isAnonymous;
    recordTypeNode.isLocal = this.isInLocalContext;
    if (!isAnonymous || this.isInLocalContext) {
        return recordTypeNode;
    }
    return createAnonymousRecordType(recordTypeDescriptorNode, recordTypeNode);
}

@Override
public BLangNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    // A singleton type is a finite type whose value space is the single literal.
    BLangFiniteTypeNode bLangFiniteTypeNode = new BLangFiniteTypeNode();
    BLangLiteral simpleLiteral = createSimpleLiteral(singletonTypeDescriptorNode.simpleContExprNode());
    bLangFiniteTypeNode.pos = simpleLiteral.pos;
    bLangFiniteTypeNode.valueSpace.add(simpleLiteral);
    return bLangFiniteTypeNode;
}

@Override
public BLangNode transform(BuiltinSimpleNameReferenceNode singletonTypeDescriptorNode) {
    return createTypeNode(singletonTypeDescriptorNode);
}

@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
    return createTypeNode(typeReferenceNode.typeName());
}

// Record field without a default: optional when followed by `?`, required otherwise.
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (recordFieldNode.questionMarkToken().isPresent()) {
        simpleVar.flagSet.add(Flag.OPTIONAL);
    } else {
        simpleVar.flagSet.add(Flag.REQUIRED);
    }
    simpleVar.flagSet.add(Flag.FIELD);
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}

// Record field with a default value expression.
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (isPresent(recordFieldNode.expression())) {
        simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
    }
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}

private void addReadOnlyQualifier(Optional<Token> readonlyKeyword, BLangSimpleVariable simpleVar) {
    if (readonlyKeyword.isPresent()) {
        simpleVar.flagSet.add(Flag.READONLY);
    }
}

@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
    return createTypeNode(recordFieldNode.typeName());
}

/**
 * Transforms a function definition; functions with a relative resource path become
 * resource functions. Docs, annotations and metadata-free position are attached.
 */
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
    BLangFunction bLFunction;
    if (funcDefNode.relativeResourcePath().isEmpty()) {
        bLFunction = createFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                funcDefNode.functionSignature(), funcDefNode.functionBody());
    } else {
        bLFunction = createResourceFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                funcDefNode.relativeResourcePath(), funcDefNode.functionSignature(), funcDefNode.functionBody());
    }
    bLFunction.annAttachments = applyAll(getAnnotations(funcDefNode.metadata()));
    bLFunction.pos = getPositionWithoutMetadata(funcDefNode);
    bLFunction.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(funcDefNode.metadata()));
    return bLFunction;
}

private BLangFunction createFunctionNode(IdentifierToken funcName, NodeList<Token> qualifierList,
                                         FunctionSignatureNode functionSignature, FunctionBodyNode functionBody) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    BLangIdentifier name = createIdentifier(getPosition(funcName), funcName);
    populateFunctionNode(name, qualifierList, functionSignature, functionBody, bLFunction);
    return bLFunction;
}

// Fills in name, qualifiers, signature and body. A null body marks an interface
// (declaration-only) function; an extern body marks the function as native.
private void populateFunctionNode(BLangIdentifier name, NodeList<Token> qualifierList,
                                  FunctionSignatureNode functionSignature, FunctionBodyNode functionBody,
                                  BLangFunction bLFunction) {
    bLFunction.name = name;
    setFunctionQualifiers(bLFunction, qualifierList);
    populateFuncSignature(bLFunction, functionSignature);
    if (functionBody == null) {
        bLFunction.body = null;
        bLFunction.flagSet.add(Flag.INTERFACE);
        bLFunction.interfaceFunction = true;
    } else {
        bLFunction.body = (BLangFunctionBody) functionBody.apply(this);
        if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
            bLFunction.flagSet.add(Flag.NATIVE);
        }
    }
}

private void setFunctionQualifiers(BLangFunction bLFunction, // continues on the next source line
NodeList<Token> qualifierList) {
    // (signature continued from the previous line) Maps each qualifier keyword to the
    // corresponding function flag; unrecognized qualifiers are silently skipped.
    for (Token qualifier : qualifierList) {
        switch (qualifier.kind()) {
            case PUBLIC_KEYWORD:
                bLFunction.flagSet.add(Flag.PUBLIC);
                break;
            case PRIVATE_KEYWORD:
                bLFunction.flagSet.add(Flag.PRIVATE);
                break;
            case REMOTE_KEYWORD:
                bLFunction.flagSet.add(Flag.REMOTE);
                break;
            case TRANSACTIONAL_KEYWORD:
                bLFunction.flagSet.add(Flag.TRANSACTIONAL);
                break;
            case RESOURCE_KEYWORD:
                bLFunction.flagSet.add(Flag.RESOURCE);
                break;
            case ISOLATED_KEYWORD:
                bLFunction.flagSet.add(Flag.ISOLATED);
                break;
            default:
                continue;
        }
    }
}

@Override
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
    BLangExternalFunctionBody externFunctionBodyNode =
            (BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
    externFunctionBodyNode.annAttachments = applyAll(externalFunctionBodyNode.annotations());
    externFunctionBodyNode.pos = getPosition(externalFunctionBodyNode);
    return externFunctionBodyNode;
}

/**
 * Transforms an explicit anonymous function expression into a lambda: the function itself
 * is hoisted to the top level (with a generated name) and a lambda node referencing it is
 * returned in its place.
 */
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location pos = getPosition(anonFuncExprNode);
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());
    bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
    bLFunction.pos = pos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    setFunctionQualifiers(bLFunction, anonFuncExprNode.qualifierList());
    addToTop(bLFunction);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = pos;
    return lambdaExpr;
}

/**
 * Transforms a block function body. Named workers (and the statements their desugaring
 * pushes onto {@code additionalStatements}) are emitted before the regular statements.
 * {@code isInLocalContext} is toggled around the body so nested type descriptors know they
 * are local.
 */
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    this.isInLocalContext = true;
    List<BLangStatement> statements = new ArrayList<>();
    if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
        NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
        generateAndAddBLangStatements(namedWorkerDeclarator.workerInitStatements(), statements);
        for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
            statements.add((BLangStatement) workerDeclarationNode.apply(this));
            // Drain statements the worker transform queued (e.g. the worker invocation def).
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
        }
    }
    generateAndAddBLangStatements(functionBodyBlockNode.statements(), statements);
    bLFuncBody.stmts = statements;
    bLFuncBody.pos = getPosition(functionBodyBlockNode);
    this.isInLocalContext = false;
    return bLFuncBody;
}

@Override
public BLangNode transform(ForEachStatementNode forEachStatementNode) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.pos = getPosition(forEachStatementNode);
    TypedBindingPatternNode typedBindingPatternNode = forEachStatementNode.typedBindingPattern();
    VariableDefinitionNode variableDefinitionNode = createBLangVarDef(getPosition(typedBindingPatternNode),
            typedBindingPatternNode, Optional.empty(), Optional.empty());
    foreach.setVariableDefinitionNode(variableDefinitionNode);
    foreach.isDeclaredWithVar = typedBindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;

    BLangBlockStmt foreachBlock = (BLangBlockStmt) forEachStatementNode.blockStatement().apply(this);
    foreachBlock.pos = getPosition(forEachStatementNode.blockStatement());
    foreach.setBody(foreachBlock);
    foreach.setCollection(createExpression(forEachStatementNode.actionOrExpressionNode()));

    forEachStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        foreach.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return foreach;
}

@Override
public BLangNode transform(ForkStatementNode forkStatementNode) {
    // Workers inside the fork attach themselves elsewhere; the fork-join node itself only
    // needs a position here.
    BLangForkJoin forkJoin = (BLangForkJoin) TreeBuilder.createForkJoinNode();
    Location forkStmtPos = getPosition(forkStatementNode);
    forkJoin.pos = forkStmtPos;
    return forkJoin;
}

/**
 * Transforms a named worker declaration: the worker body becomes an anonymous lambda
 * function (hoisted to the top level). This method returns the lambda variable definition;
 * the asynchronous invocation that actually starts the worker is pushed onto
 * {@code additionalStatements} for the enclosing body to emit.
 */
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location workerBodyPos = getPosition(namedWorkerDeclNode.workerBody());

    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));

    // Wrap the worker body's statements in a function body.
    BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
    BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    bodyNode.stmts = blockStmt.stmts;
    bodyNode.pos = workerBodyPos;
    bLFunction.body = bodyNode;
    bLFunction.internal = true;

    bLFunction.pos = workerBodyPos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    bLFunction.addFlag(Flag.WORKER);

    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        bLFunction.addFlag(Flag.TRANSACTIONAL);
    }

    String workerName = namedWorkerDeclNode.workerName().text();
    if (namedWorkerDeclNode.workerName().isMissing() || workerName.equals(IDENTIFIER_LITERAL_PREFIX)) {
        workerName = missingNodesHelper.getNextMissingNodeName(packageID);
    }

    if (workerName.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        // Quoted identifier: keep the original text and unescape for the effective name.
        bLFunction.defaultWorkerName.originalValue = workerName;
        workerName = IdentifierUtils.unescapeUnicodeCodepoints(workerName.substring(1));
    }

    bLFunction.defaultWorkerName.value = workerName;
    bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());

    NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
    bLFunction.annAttachments = applyAll(annotations);

    Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
    } else {
        // No explicit return type: default the worker's return type to nil.
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = getPosition(namedWorkerDeclNode);
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }
    addToTop(bLFunction);

    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = workerBodyPos;
    lambdaExpr.internal = true;

    // `final var <prefix><workerName> = <lambda>;`
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
    Location workerNamePos = getPosition(namedWorkerDeclNode.workerName());
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(workerLambdaName, workerNamePos)
            .setExpression(lambdaExpr)
            .isDeclaredWithVar()
            .isFinal()
            .build();
    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        var.addFlag(Flag.TRANSACTIONAL);
    }
    BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    lamdaWrkr.pos = workerBodyPos;
    var.pos = workerBodyPos;
    lamdaWrkr.setVariable(var);
    lamdaWrkr.isWorker = true;
    lamdaWrkr.internal = var.internal = true;
    if (namedWorkerDeclNode.parent().kind() == SyntaxKind.FORK_STATEMENT) {
        lamdaWrkr.isInFork = true;
        lamdaWrkr.var.flagSet.add(Flag.FORKED);
    }

    // Asynchronous invocation of the lambda — this is what actually starts the worker.
    BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
    BLangIdentifier nameInd = this.createIdentifier(workerNamePos, workerLambdaName);
    BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
            nameInd);
    bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    bLInvocation.name = (BLangIdentifier) reference.name;
    bLInvocation.pos = workerNamePos;
    bLInvocation.flagSet = new HashSet<>();
    bLInvocation.annAttachments = bLFunction.annAttachments;

    if (bLInvocation.getKind() == NodeKind.INVOCATION) {
        bLInvocation.async = true;
    } else {
        dlog.error(workerBodyPos, DiagnosticErrorCode.START_REQUIRE_INVOCATION);
    }

    // `final var <workerName> = start <lambda>();` — queued for the enclosing body.
    BLangSimpleVariable invoc = new SimpleVarBuilder()
            .with(workerName, workerNamePos)
            .isDeclaredWithVar()
            .isWorkerVar()
            .setExpression(bLInvocation)
            .isFinal()
            .setPos(workerNamePos)
            .build();

    BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    workerInvoc.pos = workerNamePos;
    workerInvoc.setVariable(invoc);
    workerInvoc.isWorker = true;
    invoc.flagSet.add(Flag.WORKER);
    this.additionalStatements.push(workerInvoc);

    return lamdaWrkr;
}

// Applies this transformer to every node in the list, collecting the results.
// Unchecked cast: each node's transform is expected to produce an A.
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
    ArrayList<A> annAttachments = new ArrayList<>();
    if (annotations == null) {
        return annAttachments;
    }
    for (B annotation : annotations) {
        A blNode = (A) annotation.apply(this);
        annAttachments.add(blNode);
    }
    return annAttachments;
}

// Transforms an annotation usage into an attachment node (name, package alias, and the
// optional mapping-constructor value).
@Override
public BLangNode transform(AnnotationNode annotation) {
    Node name = annotation.annotReference();
    BLangAnnotationAttachment bLAnnotationAttachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    if (annotation.annotValue().isPresent()) {
        MappingConstructorExpressionNode map = annotation.annotValue().get();
        BLangExpression bLExpression = (BLangExpression) map.apply(this);
        bLAnnotationAttachment.setExpression(bLExpression);
    }
    BLangNameReference nameReference = createBLangNameReference(name);
    bLAnnotationAttachment.setAnnotationName(nameReference.name);
    bLAnnotationAttachment.setPackageAlias(nameReference.pkgAlias);
    bLAnnotationAttachment.pos = getPosition(annotation);
    return bLAnnotationAttachment;
}

// Builds a query action: from-clause, intermediate clauses, then the do-clause.
@Override
public BLangNode transform(QueryActionNode queryActionNode) {
    BLangQueryAction bLQueryAction = (BLangQueryAction) TreeBuilder.createQueryActionNode();
    BLangDoClause doClause = (BLangDoClause) TreeBuilder.createDoClauseNode();
    doClause.body = (BLangBlockStmt) queryActionNode.blockStatement().apply(this);
    // Extend the do-clause body's position to include the `do` keyword itself.
    doClause.body.pos = expandLeft(doClause.body.pos, getPosition(queryActionNode.doKeyword()));
    doClause.pos = doClause.body.pos;
    bLQueryAction.queryClauseList.add(queryActionNode.queryPipeline().fromClause().apply(this));
    bLQueryAction.queryClauseList.addAll(applyAll(queryActionNode.queryPipeline().intermediateClauses()));
    bLQueryAction.queryClauseList.add(doClause);
    bLQueryAction.doClause = doClause;
    bLQueryAction.pos = getPosition(queryActionNode);
    return bLQueryAction;
}

/**
 * Transforms an annotation declaration: flags, docs, optional type descriptor, and each
 * attach point (object method/field, service, record field, or a plain identifier).
 */
@Override
public BLangNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
    BLangAnnotation annotationDecl = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    Location pos = getPositionWithoutMetadata(annotationDeclarationNode);
    annotationDecl.pos = pos;
    annotationDecl.name = createIdentifier(annotationDeclarationNode.annotationTag());

    if (annotationDeclarationNode.visibilityQualifier().isPresent()) {
        annotationDecl.addFlag(Flag.PUBLIC);
    }

    if (annotationDeclarationNode.constKeyword().isPresent()) {
        annotationDecl.addFlag(Flag.CONSTANT);
    }

    annotationDecl.annAttachments = applyAll(getAnnotations(annotationDeclarationNode.metadata()));

    annotationDecl.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(annotationDeclarationNode.metadata()));

    Optional<Node> typedesc = annotationDeclarationNode.typeDescriptor();
    if (typedesc.isPresent()) {
        annotationDecl.typeNode = createTypeNode(typedesc.get());
    }

    SeparatedNodeList<Node> paramList = annotationDeclarationNode.attachPoints();

    for (Node child : paramList) {
        AnnotationAttachPointNode attachPoint = (AnnotationAttachPointNode) child;
        boolean source = attachPoint.sourceKeyword().isPresent();
        AttachPoint bLAttachPoint;
        NodeList<Token> idents = attachPoint.identifiers();
        Token firstIndent = idents.get(0);

        switch (firstIndent.kind()) {
            case OBJECT_KEYWORD:
                // `object function` / `object field` attach points.
                Token secondIndent = idents.get(1);
                switch (secondIndent.kind()) {
                    case FUNCTION_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_METHOD.getValue(), source);
                        break;
                    case FIELD_KEYWORD:
                        bLAttachPoint = // continues on the next source line
AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_FIELD.getValue(), source); break; default: throw new RuntimeException("Syntax kind is not supported: " + secondIndent.kind()); } break; case SERVICE_KEYWORD: String value; if (idents.size() == 1) { value = AttachPoint.Point.SERVICE.getValue(); } else if (idents.size() == 3) { value = AttachPoint.Point.SERVICE_REMOTE.getValue(); } else { throw new RuntimeException("Invalid annotation attach point"); } bLAttachPoint = AttachPoint.getAttachmentPoint(value, source); break; case RECORD_KEYWORD: bLAttachPoint = AttachPoint.getAttachmentPoint(AttachPoint.Point.RECORD_FIELD.getValue(), source); break; default: bLAttachPoint = AttachPoint.getAttachmentPoint(firstIndent.text(), source); } annotationDecl.addAttachPoint(bLAttachPoint); } return annotationDecl; } @Override public BLangNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) { BLangAnnotAccessExpr annotAccessExpr = (BLangAnnotAccessExpr) TreeBuilder.createAnnotAccessExpressionNode(); Node annotTagReference = annotAccessExpressionNode.annotTagReference(); if (annotAccessExpressionNode.annotTagReference().kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) { SimpleNameReferenceNode annotName = (SimpleNameReferenceNode) annotTagReference; annotAccessExpr.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); annotAccessExpr.annotationName = createIdentifier(annotName.name()); } else { QualifiedNameReferenceNode qulifiedName = (QualifiedNameReferenceNode) annotTagReference; annotAccessExpr.pkgAlias = createIdentifier(qulifiedName.modulePrefix()); annotAccessExpr.annotationName = createIdentifier(qulifiedName.identifier()); } annotAccessExpr.pos = getPosition(annotAccessExpressionNode); annotAccessExpr.expr = createExpression(annotAccessExpressionNode.expression()); return annotAccessExpr; } @Override public BLangNode transform(ConditionalExpressionNode conditionalExpressionNode) { BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) 
TreeBuilder.createTernaryExpressionNode(); ternaryExpr.pos = getPosition(conditionalExpressionNode); ternaryExpr.elseExpr = createExpression(conditionalExpressionNode.endExpression()); ternaryExpr.thenExpr = createExpression(conditionalExpressionNode.middleExpression()); ternaryExpr.expr = createExpression(conditionalExpressionNode.lhsExpression()); if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) { BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr; BLangTernaryExpr parent = root; while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) { parent = (BLangTernaryExpr) parent.elseExpr; } ternaryExpr.expr = parent.elseExpr; parent.elseExpr = ternaryExpr; ternaryExpr = root; } return ternaryExpr; } @Override public BLangNode transform(CheckExpressionNode checkExpressionNode) { Location pos = getPosition(checkExpressionNode); BLangExpression expr = createExpression(checkExpressionNode.expression()); if (checkExpressionNode.checkKeyword().kind() == SyntaxKind.CHECK_KEYWORD) { return createCheckExpr(pos, expr); } return createCheckPanickedExpr(pos, expr); } @Override public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) { BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode(); typeTestExpr.expr = createExpression(typeTestExpressionNode.expression()); typeTestExpr.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor()); typeTestExpr.pos = getPosition(typeTestExpressionNode); return typeTestExpr; } @Override public BLangNode transform(MappingConstructorExpressionNode mapConstruct) { BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); for (MappingFieldNode field : mapConstruct.fields()) { if (field.kind() == SyntaxKind.SPREAD_FIELD) { SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field; BLangRecordSpreadOperatorField bLRecordSpreadOpField = (BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField(); 
bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
bLRecordSpreadOpField.pos = getPosition(spreadFieldNode);
bLiteralNode.fields.add(bLRecordSpreadOpField);
} else if (field.kind() == SyntaxKind.COMPUTED_NAME_FIELD) {
    ComputedNameFieldNode computedNameField = (ComputedNameFieldNode) field;
    BLangRecordKeyValueField bLRecordKeyValueField =
            (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
    bLRecordKeyValueField.valueExpr = createExpression(computedNameField.valueExpr());
    bLRecordKeyValueField.key =
            new BLangRecordLiteral.BLangRecordKey(createExpression(computedNameField.fieldNameExpr()));
    bLRecordKeyValueField.key.computedKey = true;
    bLiteralNode.fields.add(bLRecordKeyValueField);
} else {
    SpecificFieldNode specificField = (SpecificFieldNode) field;
    io.ballerina.compiler.syntax.tree.ExpressionNode valueExpr = specificField.valueExpr().orElse(null);
    if (valueExpr == null) {
        // Shorthand field `{x}` — the field name doubles as a variable reference.
        BLangRecordLiteral.BLangRecordVarNameField fieldVar =
                (BLangRecordLiteral.BLangRecordVarNameField) TreeBuilder.createRecordVarRefNameFieldNode();
        fieldVar.variableName = createIdentifier((Token) ((SpecificFieldNode) field).fieldName());
        fieldVar.pkgAlias = createIdentifier(null, "");
        fieldVar.pos = fieldVar.variableName.pos;
        fieldVar.readonly = specificField.readonlyKeyword().isPresent();
        bLiteralNode.fields.add(fieldVar);
    } else {
        BLangRecordKeyValueField bLRecordKeyValueField =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        bLRecordKeyValueField.pos = getPosition(specificField);
        bLRecordKeyValueField.readonly = specificField.readonlyKeyword().isPresent();
        bLRecordKeyValueField.valueExpr = createExpression(valueExpr);
        bLRecordKeyValueField.key =
                new BLangRecordLiteral.BLangRecordKey(createExpression(specificField.fieldName()));
        bLRecordKeyValueField.key.computedKey = false;
        bLRecordKeyValueField.key.pos = getPosition(specificField.fieldName());
        bLiteralNode.fields.add(bLRecordKeyValueField);
    }
}
}
bLiteralNode.pos = getPosition(mapConstruct);
return bLiteralNode;
}

// Transforms a list constructor (`[...]`) into a BLangListConstructorExpr.
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
    List<BLangExpression> argExprList = new ArrayList<>();
    BLangListConstructorExpr listConstructorExpr =
            (BLangListConstructorExpr) TreeBuilder.createListConstructorExpressionNode();
    for (Node expr : listConstructorExprNode.expressions()) {
        argExprList.add(createExpression(expr));
    }
    listConstructorExpr.exprs = argExprList;
    listConstructorExpr.pos = getPosition(listConstructorExprNode);
    return listConstructorExpr;
}

// Transforms a unary expression. Signed numeric literals (`-1`, `+1`) are folded directly
// into a simple literal rather than a unary-operator node.
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
    Location pos = getPosition(unaryExprNode);
    SyntaxKind expressionKind = unaryExprNode.expression().kind();
    SyntaxKind unaryOperatorKind = unaryExprNode.unaryOperator().kind();
    if (expressionKind == SyntaxKind.NUMERIC_LITERAL &&
            (unaryOperatorKind == SyntaxKind.MINUS_TOKEN || unaryOperatorKind == SyntaxKind.PLUS_TOKEN)) {
        return createSimpleLiteral(unaryExprNode);
    }
    OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
    BLangExpression expr = createExpression(unaryExprNode.expression());
    return createBLangUnaryExpr(pos, operator, expr);
}

// Transforms a `typeof` expression into a unary expression using the TYPEOF operator.
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
    Location pos = getPosition(typeofExpressionNode);
    OperatorKind operator = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
    BLangExpression expr = createExpression(typeofExpressionNode.expression());
    return createBLangUnaryExpr(pos, operator, expr);
}

// Transforms a binary expression; the elvis operator (`?:`) gets its own node kind.
// Body continues on the next source line.
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
    if (binaryExprNode.operator().kind() == SyntaxKind.ELVIS_TOKEN) {
        BLangElvisExpr elvisExpr = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
        elvisExpr.pos = getPosition(binaryExprNode);
        elvisExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
        elvisExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
        return elvisExpr;
    }
    BLangBinaryExpr bLBinaryExpr =
(BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
bLBinaryExpr.pos = getPosition(binaryExprNode);
bLBinaryExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
bLBinaryExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
bLBinaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
return bLBinaryExpr;
}

// Transforms a field access (`expr.field` / `expr.ns:field`); braced container
// expressions are unwrapped before the access node is built.
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = fieldAccessExprNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess)
                        TreeBuilder.createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    io.ballerina.compiler.syntax.tree.ExpressionNode containerExpr = fieldAccessExprNode.expression();
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        bLFieldBasedAccess.expr = createExpression(((BracedExpressionNode) containerExpr).expression());
    } else {
        bLFieldBasedAccess.expr = createExpression(containerExpr);
    }
    bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
    bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode.fieldName());
    bLFieldBasedAccess.optionalFieldAccess = false;
    return bLFieldBasedAccess;
}

// Transforms an optional field access (`expr?.field`); body continues on the next source line.
@Override
public BLangNode transform(OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode)
{
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = optionalFieldAccessExpressionNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) TreeBuilder
                        .createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    bLFieldBasedAccess.pos = getPosition(optionalFieldAccessExpressionNode);
    bLFieldBasedAccess.field.pos = getPosition(optionalFieldAccessExpressionNode.fieldName());
    bLFieldBasedAccess.expr = createExpression(optionalFieldAccessExpressionNode.expression());
    bLFieldBasedAccess.optionalFieldAccess = true;
    return bLFieldBasedAccess;
}

// A braced expression is transparent: transform straight to the inner expression.
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
    return createExpression(brcExprOut.expression());
}

// Transforms a function call; calls that are the target of a `start` action are async.
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
    return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
            getPosition(functionCallNode), isFunctionCallAsync(functionCallNode));
}

// Transforms an `error(...)` constructor expression; body continues on the next source line.
@Override
public BLangNode transform(ErrorConstructorExpressionNode errorConstructorExprNode) {
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    errorConstructorExpr.pos = getPosition(errorConstructorExprNode);
    if (errorConstructorExprNode.typeReference().isPresent()) {
errorConstructorExpr.errorTypeRef =
        (BLangUserDefinedType) createTypeNode(errorConstructorExprNode.typeReference().get());
}
List<BLangExpression> positionalArgs = new ArrayList<>();
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
// Split the constructor arguments into positional and named lists.
for (Node argNode : errorConstructorExprNode.arguments()) {
    if (argNode.kind() == SyntaxKind.POSITIONAL_ARG) {
        positionalArgs.add((BLangExpression) transform((PositionalArgumentNode) argNode));
    } else if (argNode.kind() == SyntaxKind.NAMED_ARG) {
        namedArgs.add((BLangNamedArgsExpression) transform((NamedArgumentNode) argNode));
    }
}
errorConstructorExpr.positionalArgs = positionalArgs;
errorConstructorExpr.namedArgs = namedArgs;
return errorConstructorExpr;
}

// Transforms a method call (`expr.method(args)`) into an invocation with a receiver.
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
    BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
            methodCallExprNode.arguments(), getPosition(methodCallExprNode), false);
    bLInvocation.expr = createExpression(methodCallExprNode.expression());
    return bLInvocation;
}

// Transforms `new (...)` without an explicit type descriptor.
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
    BLangTypeInit initNode = createTypeInit(implicitNewExprNode);
    BLangInvocation invocationNode = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    return initNode;
}

// Transforms `new T(...)` with an explicit type descriptor.
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    BLangTypeInit initNode = createTypeInit(explicitNewExprNode);
    BLangInvocation invocationNode = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    return initNode;
}

// A call is async iff it is the direct target of a `start` action.
private boolean isFunctionCallAsync(FunctionCallExpressionNode functionCallExpressionNode) {
    return functionCallExpressionNode.parent().kind() == SyntaxKind.START_ACTION;
}

// Builds the BLangTypeInit skeleton for a `new` expression; body continues on the next line.
private BLangTypeInit createTypeInit(NewExpressionNode
expression) {
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = getPosition(expression);
    if (expression.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        Node type = ((ExplicitNewExpressionNode) expression).typeDescriptor();
        initNode.userDefinedType = createTypeNode(type);
    }
    return initNode;
}

// Builds the `new` invocation node that carries the constructor arguments.
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = getPosition(expression);
    populateArgsInvocation(expression, invocationNode);
    BLangNameReference nameReference = createBLangNameReference(newKeyword);
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    return invocationNode;
}

// Copies the `new(...)` argument expressions into the invocation node.
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    Iterator<FunctionArgumentNode> argumentsIter = getArgumentNodesIterator(expression);
    if (argumentsIter != null) {
        while (argumentsIter.hasNext()) {
            BLangExpression argument = createExpression(argumentsIter.next());
            invocationNode.argExprs.add(argument);
        }
    }
}

// Returns an iterator over the parenthesized argument list of a `new` expression, or
// null when an implicit-new has no argument list at all.
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    Iterator<FunctionArgumentNode> argumentsIter = null;
    if (expression.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        Optional<ParenthesizedArgList> argsList = ((ImplicitNewExpressionNode) expression).parenthesizedArgList();
        if (argsList.isPresent()) {
            ParenthesizedArgList argList = argsList.get();
            argumentsIter = argList.arguments().iterator();
        }
    } else {
        ParenthesizedArgList argList =
                (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
        argumentsIter = argList.arguments().iterator();
    }
    return argumentsIter;
}

// Transforms an indexed access (`expr[k]`, multi-key `expr[k1, k2]`); body continues on
// the next source line.
@Override
public BLangNode transform(IndexedExpressionNode indexedExpressionNode) {
    BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess)
TreeBuilder.createIndexBasedAccessNode();
indexBasedAccess.pos = getPosition(indexedExpressionNode);
SeparatedNodeList<io.ballerina.compiler.syntax.tree.ExpressionNode> keys =
        indexedExpressionNode.keyExpression();
if (keys.size() == 1) {
    indexBasedAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression().get(0));
} else {
    // Multiple keys: wrap them into a table multi-key expression.
    BLangTableMultiKeyExpr multiKeyExpr =
            (BLangTableMultiKeyExpr) TreeBuilder.createTableMultiKeyExpressionNode();
    multiKeyExpr.pos = getPosition(keys.get(0), keys.get(keys.size() - 1));
    List<BLangExpression> multiKeyIndexExprs = new ArrayList<>();
    for (io.ballerina.compiler.syntax.tree.ExpressionNode keyExpr : keys) {
        multiKeyIndexExprs.add(createExpression(keyExpr));
    }
    multiKeyExpr.multiKeyIndexExprs = multiKeyIndexExprs;
    indexBasedAccess.indexExpr = multiKeyExpr;
}
Node containerExpr = indexedExpressionNode.containerExpression();
BLangExpression expression = createExpression(containerExpr);
if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
    // `(expr)[k]` — keep the grouping by wrapping the index access in a group node.
    indexBasedAccess.expr = ((BLangGroupExpr) expression).expression;
    BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
    group.expression = indexBasedAccess;
    group.pos = getPosition(indexedExpressionNode);
    return group;
} else if (containerExpr.kind() == SyntaxKind.XML_STEP_EXPRESSION) {
    // XML step access: the index becomes the child index of the navigation access node.
    ((BLangXMLNavigationAccess) expression).childIndex = indexBasedAccess.indexExpr;
    return expression;
}
indexBasedAccess.expr = expression;
return indexBasedAccess;
}

// Transforms a type cast (`<T> expr`); body continues on the next source line.
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = getPosition(typeCastExpressionNode);
    TypeCastParamNode typeCastParamNode = typeCastExpressionNode.typeCastParam();
    if (typeCastParamNode != null && typeCastParamNode.type().isPresent()) {
        typeConversionNode.typeNode = createTypeNode(typeCastParamNode.type().get());
    }
typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
typeConversionNode.annAttachments = applyAll(typeCastParamNode.annotations());
return typeConversionNode;
}

// Transforms bare tokens that can appear as expressions (template / XML text pieces).
@Override
public BLangNode transform(Token token) {
    SyntaxKind kind = token.kind();
    switch (kind) {
        case XML_TEXT_CONTENT:
        case TEMPLATE_STRING:
        case CLOSE_BRACE_TOKEN:
            return createSimpleLiteral(token);
        default:
            throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
}

// A template interpolation (`${expr}`) is transparent: transform the inner expression.
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    return createExpression(interpolationNode.expression());
}

// Dispatches template expressions (xml / string / raw templates) to the matching builder.
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    SyntaxKind kind = expressionNode.kind();
    switch (kind) {
        case XML_TEMPLATE_EXPRESSION:
            BLangNode xmlTemplateLiteral = createXmlTemplateLiteral(expressionNode);
            xmlTemplateLiteral.pos = getPosition(expressionNode);
            return xmlTemplateLiteral;
        case STRING_TEMPLATE_EXPRESSION:
            return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
        case RAW_TEMPLATE_EXPRESSION:
            return createRawTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
        default:
            throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
}

// Transforms a `table [...]` constructor including the optional key specifier.
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    for (Node row : tableConstructorExpressionNode.rows()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) row.apply(this));
    }
    if (tableConstructorExpressionNode.keySpecifier().isPresent()) {
        tableConstructorExpr.tableKeySpecifier =
                (BLangTableKeySpecifier) tableConstructorExpressionNode.keySpecifier().orElse(null).apply(this);
    }
    return tableConstructorExpr;
}

// Transforms a `trap` expression; body continues on the next source line.
@Override
public BLangNode transform(TrapExpressionNode
trapExpressionNode) {
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.expr = createExpression(trapExpressionNode.expression());
    trapExpr.pos = getPosition(trapExpressionNode);
    return trapExpr;
}

// Transforms a worker receive action (`<- w`). Multiple receive fields are not yet
// supported: an error is logged and a missing-identifier token stands in for the name.
@Override
public BLangNode transform(ReceiveActionNode receiveActionNode) {
    BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    Node receiveWorkers = receiveActionNode.receiveWorkers();
    Token workerName;
    if (receiveWorkers.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        workerName = ((SimpleNameReferenceNode) receiveWorkers).name();
    } else {
        Location receiveFieldsPos = getPosition(receiveWorkers);
        dlog.error(receiveFieldsPos, DiagnosticErrorCode.MULTIPLE_RECEIVE_ACTION_NOT_YET_SUPPORTED);
        workerName = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
    }
    workerReceiveExpr.setWorkerName(createIdentifier(workerName));
    workerReceiveExpr.pos = getPosition(receiveActionNode);
    return workerReceiveExpr;
}

// Transforms a synchronous send action (`expr ->> w`).
@Override
public BLangNode transform(SyncSendActionNode syncSendActionNode) {
    BLangWorkerSyncSendExpr workerSendExpr = TreeBuilder.createWorkerSendSyncExprNode();
    workerSendExpr.setWorkerName(createIdentifier(
            syncSendActionNode.peerWorker().name()));
    workerSendExpr.expr = createExpression(syncSendActionNode.expression());
    workerSendExpr.pos = getPosition(syncSendActionNode);
    return workerSendExpr;
}

// Transforms an implicit anonymous function (`x => expr`) into an arrow function node;
// body continues on the next source line.
@Override
public BLangNode transform(ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
    BLangArrowFunction arrowFunction = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunction.pos = getPosition(implicitAnonymousFunctionExpressionNode);
    arrowFunction.functionName = createIdentifier(arrowFunction.pos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    Node param = implicitAnonymousFunctionExpressionNode.params();
    if (param.kind() ==
SyntaxKind.INFER_PARAM_LIST) {
    // `(a, b) => ...` — each inferred parameter name arrives as a user-defined type node.
    ImplicitAnonymousFunctionParameters paramsNode = (ImplicitAnonymousFunctionParameters) param;
    SeparatedNodeList<SimpleNameReferenceNode> paramList = paramsNode.parameters();
    for (SimpleNameReferenceNode child : paramList) {
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) child.apply(this);
        BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        parameter.name = userDefinedType.typeName;
        parameter.pos = getPosition(child);
        arrowFunction.params.add(parameter);
    }
} else {
    // Single bare parameter: `a => ...`.
    BLangUserDefinedType userDefinedType = (BLangUserDefinedType) param.apply(this);
    BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    parameter.name = userDefinedType.typeName;
    parameter.pos = getPosition(param);
    arrowFunction.params.add(parameter);
}
arrowFunction.body = new BLangExprFunctionBody();
arrowFunction.body.expr = createExpression(implicitAnonymousFunctionExpressionNode.expression());
arrowFunction.body.pos = arrowFunction.body.expr.pos;
return arrowFunction;
}

// Transforms a `commit` action.
@Override
public BLangNode transform(CommitActionNode commitActionNode) {
    BLangCommitExpr commitExpr = TreeBuilder.createCommitExpressionNode();
    commitExpr.pos = getPosition(commitActionNode);
    return commitExpr;
}

// Transforms a `flush` action with an optional peer worker.
@Override
public BLangNode transform(FlushActionNode flushActionNode) {
    BLangWorkerFlushExpr workerFlushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    Node optionalPeerWorker = flushActionNode.peerWorker().orElse(null);
    if (optionalPeerWorker != null) {
        SimpleNameReferenceNode peerWorker = (SimpleNameReferenceNode) optionalPeerWorker;
        workerFlushExpr.workerIdentifier = createIdentifier(peerWorker.name());
    }
    workerFlushExpr.pos = getPosition(flushActionNode);
    return workerFlushExpr;
}

// Transforms a `let` expression; body continues on the next source line.
@Override
public BLangNode transform(LetExpressionNode letExpressionNode) {
    BLangLetExpression letExpr = (BLangLetExpression) TreeBuilder.createLetExpressionNode();
    letExpr.pos = getPosition(letExpressionNode);
letExpr.expr = createExpression(letExpressionNode.expression());
List<BLangLetVariable> letVars = new ArrayList<>();
for (LetVariableDeclarationNode letVarDecl : letExpressionNode.letVarDeclarations()) {
    letVars.add(createLetVariable(letVarDecl));
}
letExpr.letVarDeclarations = letVars;
return letExpr;
}

// Builds a single `let` variable declaration; let variables are implicitly final.
public BLangLetVariable createLetVariable(LetVariableDeclarationNode letVarDecl) {
    BLangLetVariable letVar = TreeBuilder.createLetVariableNode();
    VariableDefinitionNode varDefNode = createBLangVarDef(getPosition(letVarDecl),
            letVarDecl.typedBindingPattern(), Optional.of(letVarDecl.expression()), Optional.empty());
    varDefNode.getVariable().addFlag(Flag.FINAL);
    List<BLangNode> annots = applyAll(letVarDecl.annotations());
    for (BLangNode node : annots) {
        varDefNode.getVariable().addAnnotationAttachment((AnnotationAttachmentNode) node);
    }
    letVar.definitionNode = varDefNode;
    return letVar;
}

// Transforms a mapping binding pattern into a record variable reference, separating the
// rest binding (`...r`) from the named field bindings.
@Override
public BLangNode transform(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVarRef recordVarRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordVarRef.pos = getPosition(mappingBindingPatternNode);
    List<BLangRecordVarRefKeyValue> expressions = new ArrayList<>();
    for (BindingPatternNode expr : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (expr.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            recordVarRef.restParam = createExpression(expr);
        } else {
            expressions.add(createRecordVarKeyValue(expr));
        }
    }
    recordVarRef.recordRefFields = expressions;
    return recordVarRef;
}

// Builds the key/value pair for one record field binding; body continues on the next line.
private BLangRecordVarRefKeyValue createRecordVarKeyValue(BindingPatternNode expr) {
    BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
    if (expr instanceof FieldBindingPatternFullNode) {
        FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) expr;
        keyValue.variableName = createIdentifier(fullNode.variableName().name());
        keyValue.variableReference = createExpression(fullNode.bindingPattern());
    } else {
        FieldBindingPatternVarnameNode
varnameNode = (FieldBindingPatternVarnameNode) expr;
        // Shorthand `{x}` — the field name doubles as the bound variable reference.
        keyValue.variableName = createIdentifier(varnameNode.variableName().name());
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        varRef.pos = getPosition(varnameNode.variableName());
        varRef.variableName = createIdentifier(varnameNode.variableName().name());
        varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        keyValue.variableReference = varRef;
    }
    return keyValue;
}

// Transforms a list binding pattern into a tuple variable reference, splitting out the
// rest binding.
@Override
public BLangNode transform(ListBindingPatternNode listBindingPatternNode) {
    BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    List<BLangExpression> expressions = new ArrayList<>();
    for (BindingPatternNode expr : listBindingPatternNode.bindingPatterns()) {
        if (expr.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            tupleVarRef.restParam = createExpression(expr);
        } else {
            expressions.add(createExpression(expr));
        }
    }
    tupleVarRef.expressions = expressions;
    tupleVarRef.pos = getPosition(listBindingPatternNode);
    return tupleVarRef;
}

// A rest binding pattern (`...r`) resolves to a reference to its variable name.
@Override
public BLangNode transform(RestBindingPatternNode restBindingPatternNode) {
    return createExpression(restBindingPatternNode.variableName());
}

// A capture binding pattern resolves to a reference to its variable name.
@Override
public BLangNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    return createExpression(captureBindingPatternNode.variableName());
}

// A wildcard binding pattern (`_`) becomes a reference to the reserved ignore name.
@Override
public BLangNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
    BLangSimpleVarRef ignoreVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    BLangIdentifier ignore = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    ignore.value = Names.IGNORE.value;
    ignoreVarRef.variableName = ignore;
    ignore.pos = getPosition(wildcardBindingPatternNode);
    return ignoreVarRef;
}

// Transforms an error binding pattern into an error variable reference; body continues
// on the next source line.
@Override
public BLangNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
    BLangErrorVarRef errorVarRef = (BLangErrorVarRef) TreeBuilder.createErrorVariableReferenceNode();
errorVarRef.pos = getPosition(errorBindingPatternNode);
Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
if (errorTypeRef.isPresent()) {
    errorVarRef.typeNode = createTypeNode(errorTypeRef.get());
}
SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
        errorBindingPatternNode.argListBindingPatterns();
int numberOfArgs = argListBindingPatterns.size();
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
for (int position = 0; position < numberOfArgs; position++) {
    BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
    switch (bindingPatternNode.kind()) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // The first positional binding is the error message; at any other position
            // control falls through and the binding is treated as the cause.
            if (position == 0) {
                errorVarRef.message = (BLangVariableReference) createExpression(bindingPatternNode);
                break;
            }
        case ERROR_BINDING_PATTERN:
            errorVarRef.cause = (BLangVariableReference) createExpression(bindingPatternNode);
            break;
        case NAMED_ARG_BINDING_PATTERN:
            namedArgs.add((BLangNamedArgsExpression) bindingPatternNode.apply(this));
            break;
        default:
            // Rest binding pattern captures the remaining detail entries.
            errorVarRef.restVar = (BLangVariableReference) createExpression(bindingPatternNode);
    }
}
errorVarRef.detail = namedArgs;
return errorVarRef;
}

// Transforms a named-arg binding inside an error binding pattern.
@Override
public BLangNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
    BLangNamedArgsExpression namedArgsExpression = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArgsExpression.pos = getPosition(namedArgBindingPatternNode);
    namedArgsExpression.name = createIdentifier(namedArgBindingPatternNode.argName());
    namedArgsExpression.expr = createExpression(namedArgBindingPatternNode.bindingPattern());
    return namedArgsExpression;
}

// Transforms a return statement; a bare `return` receives an explicit nil literal.
// Body continues on the next source line.
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    BLangReturn bLReturn = (BLangReturn) TreeBuilder.createReturnNode();
    bLReturn.pos = getPosition(returnStmtNode);
    if (returnStmtNode.expression().isPresent()) {
        bLReturn.expr = createExpression(returnStmtNode.expression().get());
    } else {
        BLangLiteral nilLiteral =
(BLangLiteral) TreeBuilder.createLiteralExpression();
        nilLiteral.pos = getPosition(returnStmtNode);
        nilLiteral.value = Names.NIL_VALUE;
        nilLiteral.setBType(symTable.nilType);
        bLReturn.expr = nilLiteral;
    }
    return bLReturn;
}

// Transforms a `panic` statement.
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    BLangPanic bLPanic = (BLangPanic) TreeBuilder.createPanicNode();
    bLPanic.pos = getPosition(panicStmtNode);
    bLPanic.expr = createExpression(panicStmtNode.expression());
    return bLPanic;
}

// Transforms a `continue` statement.
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    BLangContinue bLContinue = (BLangContinue) TreeBuilder.createContinueNode();
    bLContinue.pos = getPosition(continueStmtNode);
    return bLContinue;
}

// Transforms a listener declaration into a listener-flagged simple variable.
@Override
public BLangNode transform(ListenerDeclarationNode listenerDeclarationNode) {
    Token visibilityQualifier = null;
    if (listenerDeclarationNode.visibilityQualifier().isPresent()) {
        visibilityQualifier = listenerDeclarationNode.visibilityQualifier().get();
    }
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(listenerDeclarationNode.variableName())
            .setTypeByNode(listenerDeclarationNode.typeDescriptor().orElse(null))
            .setExpressionByNode(listenerDeclarationNode.initializer())
            .setVisibility(visibilityQualifier)
            .isListenerVar()
            .build();
    var.pos = getPositionWithoutMetadata(listenerDeclarationNode);
    var.name.pos = getPosition(listenerDeclarationNode.variableName());
    var.annAttachments = applyAll(getAnnotations(listenerDeclarationNode.metadata()));
    return var;
}

// Transforms a `break` statement.
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    BLangBreak bLBreak = (BLangBreak) TreeBuilder.createBreakNode();
    bLBreak.pos = getPosition(breakStmtNode);
    return bLBreak;
}

// Transforms an assignment; binding-pattern LHS forms become destructure statements.
// Body continues on the next source line.
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    SyntaxKind lhsKind = assignmentStmtNode.varRef().kind();
    switch (lhsKind) {
        case LIST_BINDING_PATTERN:
            return createTupleDestructureStatement(assignmentStmtNode);
        case MAPPING_BINDING_PATTERN:
            return
createRecordDestructureStatement(assignmentStmtNode);
        case ERROR_BINDING_PATTERN:
            return createErrorDestructureStatement(assignmentStmtNode);
        default:
            break;
    }
    BLangAssignment bLAssignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression lhsExpr = createExpression(assignmentStmtNode.varRef());
    // Invocations are not valid lvalues — report them before building the assignment.
    validateLvexpr(lhsExpr, DiagnosticErrorCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    bLAssignment.setExpression(createExpression(assignmentStmtNode.expression()));
    bLAssignment.pos = getPosition(assignmentStmtNode);
    bLAssignment.varRef = lhsExpr;
    return bLAssignment;
}

// Builds a tuple destructure statement for `[a, b] = expr`.
public BLangNode createTupleDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangTupleDestructure tupleDestructure =
            (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    tupleDestructure.varRef = (BLangTupleVarRef) createExpression(assignmentStmtNode.varRef());
    tupleDestructure.setExpression(createExpression(assignmentStmtNode.expression()));
    tupleDestructure.pos = getPosition(assignmentStmtNode);
    return tupleDestructure;
}

// Builds a record destructure statement for `{a, b} = expr`.
public BLangNode createRecordDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangRecordDestructure recordDestructure =
            (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    recordDestructure.varRef = (BLangRecordVarRef) createExpression(assignmentStmtNode.varRef());
    recordDestructure.setExpression(createExpression(assignmentStmtNode.expression()));
    recordDestructure.pos = getPosition(assignmentStmtNode);
    return recordDestructure;
}

// Builds an error destructure statement for `error(...) = expr`; body continues on the
// next source line.
public BLangNode createErrorDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangErrorDestructure errorDestructure =
            (BLangErrorDestructure) TreeBuilder.createErrorDestructureStatementNode();
    errorDestructure.varRef = (BLangErrorVarRef) createExpression(assignmentStmtNode.varRef());
    errorDestructure.setExpression(createExpression(assignmentStmtNode.expression()));
    errorDestructure.pos = getPosition(assignmentStmtNode);
    return
errorDestructure;
}

// Transforms a compound assignment (`x += e`, etc.).
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    BLangCompoundAssignment bLCompAssignment = (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    bLCompAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    bLCompAssignment
            .setVariable((VariableReferenceNode) createExpression(compoundAssignmentStmtNode.lhsExpression()));
    bLCompAssignment.pos = getPosition(compoundAssignmentStmtNode);
    bLCompAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return bLCompAssignment;
}

// Reports invocations used as assignment targets, recursing through access expressions.
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    if (lExprNode.getKind() == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    }
    if (lExprNode.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            || lExprNode.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}

// Transforms a `do` statement with its optional on-fail clause.
@Override
public BLangNode transform(DoStatementNode doStatementNode) {
    BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
    bLDo.pos = getPosition(doStatementNode);
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) doStatementNode.blockStatement().apply(this);
    bLBlockStmt.pos = getPosition(doStatementNode.blockStatement());
    bLDo.setBody(bLBlockStmt);
    doStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        bLDo.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return bLDo;
}

// Transforms a `fail` statement.
@Override
public BLangNode transform(FailStatementNode failStatementNode) {
    BLangFail bLFail = (BLangFail) TreeBuilder.createFailNode();
    bLFail.pos = getPosition(failStatementNode);
    bLFail.expr = createExpression(failStatementNode.expression());
    return bLFail;
}

// Transforms a `while` statement; body continues on the next source line.
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    BLangWhile bLWhile = (BLangWhile) TreeBuilder.createWhileNode();
bLWhile.setCondition(createExpression(whileStmtNode.condition()));
    bLWhile.pos = getPosition(whileStmtNode);
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    bLBlockStmt.pos = getPosition(whileStmtNode.whileBody());
    bLWhile.setBody(bLBlockStmt);
    whileStmtNode.onFailClause().ifPresent(onFailClauseNode -> {
        bLWhile.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return bLWhile;
}

// Transforms an if/else statement, recursing into the optional else block.
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    BLangIf bLIf = (BLangIf) TreeBuilder.createIfElseStatementNode();
    bLIf.pos = getPosition(ifElseStmtNode);
    bLIf.setCondition(createExpression(ifElseStmtNode.condition()));
    bLIf.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    ifElseStmtNode.elseBody().ifPresent(elseBody -> {
        ElseBlockNode elseNode = (ElseBlockNode) elseBody;
        bLIf.setElseStatement(
                (org.ballerinalang.model.tree.statements.StatementNode) elseNode.elseBody().apply(this));
    });
    return bLIf;
}

// Transforms a block statement. Statements are built in local context, and blocks that
// belong to if/else get their position expanded to cover the parent keyword.
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    this.isInLocalContext = true;
    bLBlockStmt.stmts = generateBLangStatements(blockStatement.statements());
    this.isInLocalContext = false;
    bLBlockStmt.pos = getPosition(blockStatement);
    SyntaxKind parent = blockStatement.parent().kind();
    if (parent == SyntaxKind.IF_ELSE_STATEMENT || parent == SyntaxKind.ELSE_BLOCK) {
        bLBlockStmt.pos = expandLeft(bLBlockStmt.pos, getPosition(blockStatement.parent()));
    }
    return bLBlockStmt;
}

// Transforms a `rollback` statement with its optional expression.
@Override
public BLangNode transform(RollbackStatementNode rollbackStatementNode) {
    BLangRollback rollbackStmt = (BLangRollback) TreeBuilder.createRollbackNode();
    rollbackStmt.pos = getPosition(rollbackStatementNode);
    if (rollbackStatementNode.expression().isPresent()) {
        rollbackStmt.expr = createExpression(rollbackStatementNode.expression().get());
    }
    return rollbackStmt;
}

// Transforms a `lock` statement; body continues on the next source line.
@Override
public BLangNode
transform(LockStatementNode lockStatementNode) { BLangLock lockNode = (BLangLock) TreeBuilder.createLockNode(); lockNode.pos = getPosition(lockStatementNode); BLangBlockStmt lockBlock = (BLangBlockStmt) lockStatementNode.blockStatement().apply(this); lockBlock.pos = getPosition(lockStatementNode.blockStatement()); lockNode.setBody(lockBlock); lockStatementNode.onFailClause().ifPresent(onFailClauseNode -> { lockNode.setOnFailClause( (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this))); }); return lockNode; } @Override public BLangNode transform(VariableDeclarationNode varDeclaration) { return (BLangNode) createBLangVarDef(getPosition(varDeclaration), varDeclaration.typedBindingPattern(), varDeclaration.initializer(), varDeclaration.finalKeyword()); } private VariableDefinitionNode createBLangVarDef(Location location, TypedBindingPatternNode typedBindingPattern, Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer, Optional<Token> finalKeyword) { BindingPatternNode bindingPattern = typedBindingPattern.bindingPattern(); BLangVariable variable = getBLangVariableNode(bindingPattern); List<Token> qualifiers = new ArrayList<>(); if (finalKeyword.isPresent()) { qualifiers.add(finalKeyword.get()); } NodeList<Token> qualifierList = NodeFactory.createNodeList(qualifiers); switch (bindingPattern.kind()) { case CAPTURE_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: BLangSimpleVariableDef bLVarDef = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode(); bLVarDef.pos = variable.pos = location; BLangExpression expr = initializer.isPresent() ? 
createExpression(initializer.get()) : null; // tail of createBLangVarDef: capture/wildcard binding-pattern case
variable.setInitialExpression(expr);
bLVarDef.setVariable(variable);
if (finalKeyword.isPresent()) {
    variable.flagSet.add(Flag.FINAL);
}
TypeDescriptorNode typeDesc = typedBindingPattern.typeDescriptor();
variable.isDeclaredWithVar = isDeclaredWithVar(typeDesc);
if (!variable.isDeclaredWithVar) {
    variable.setTypeNode(createTypeNode(typeDesc));
}
return bLVarDef;
// Structured binding patterns share initializeBLangVariable() and differ only in
// the concrete variable-definition node that wraps the variable.
case MAPPING_BINDING_PATTERN:
    initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer, qualifierList);
    return createRecordVariableDef(variable);
case LIST_BINDING_PATTERN:
    initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer, qualifierList);
    return createTupleVariableDef(variable);
case ERROR_BINDING_PATTERN:
    initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer, qualifierList);
    return createErrorVariableDef(variable);
default:
    throw new RuntimeException(
            "Syntax kind is not a valid binding pattern " + typedBindingPattern.bindingPattern().kind());
}
}

/**
 * Applies the variable qualifiers (final / configurable / isolated), the declared type
 * (unless declared with {@code var}) and the initializer expression to the given variable.
 */
private void initializeBLangVariable(BLangVariable var, TypeDescriptorNode type,
                                     Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
                                     NodeList<Token> qualifiers) {
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            markVariableWithFlag(var, Flag.FINAL);
        } else if (qualifier.kind() == SyntaxKind.CONFIGURABLE_KEYWORD) {
            var.flagSet.add(Flag.CONFIGURABLE);
            // A configurable variable is implicitly final.
            var.flagSet.add(Flag.FINAL);
            // NOTE(review): initializer.get() is called without an isPresent() check; this
            // assumes a configurable variable always carries an initializer ("= ?" or an
            // expression) — confirm the parser guarantees this, otherwise this can throw
            // NoSuchElementException.
            if (initializer.get().kind() == SyntaxKind.REQUIRED_EXPRESSION) {
                var.flagSet.add(Flag.REQUIRED);
                // "?" is only a required-value marker, not a real initial expression.
                initializer = Optional.empty();
            }
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            var.flagSet.add(Flag.ISOLATED);
        }
    }
    var.isDeclaredWithVar = isDeclaredWithVar(type);
    if (!var.isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(type));
    }
    if (initializer.isPresent()) {
        var.setInitialExpression(createExpression(initializer.get()));
    }
}

private BLangRecordVariableDef
createRecordVariableDef(BLangVariable var) { BLangRecordVariableDef varDefNode = (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode(); varDefNode.pos = var.pos; varDefNode.setVariable(var); return varDefNode; } private BLangTupleVariableDef createTupleVariableDef(BLangVariable tupleVar) { BLangTupleVariableDef varDefNode = (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode(); varDefNode.pos = tupleVar.pos; varDefNode.setVariable(tupleVar); return varDefNode; } private BLangErrorVariableDef createErrorVariableDef(BLangVariable errorVar) { BLangErrorVariableDef varDefNode = (BLangErrorVariableDef) TreeBuilder.createErrorVariableDefinitionNode(); varDefNode.pos = errorVar.pos; varDefNode.setVariable(errorVar); return varDefNode; } @Override public BLangNode transform(ExpressionStatementNode expressionStatement) { SyntaxKind kind = expressionStatement.expression().kind(); switch (kind) { case ASYNC_SEND_ACTION: return expressionStatement.expression().apply(this); default: BLangExpressionStmt bLExpressionStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); bLExpressionStmt.expr = createExpression(expressionStatement.expression()); bLExpressionStmt.pos = getPosition(expressionStatement); return bLExpressionStmt; } } @Override public BLangNode transform(AsyncSendActionNode asyncSendActionNode) { BLangWorkerSend workerSendNode = (BLangWorkerSend) TreeBuilder.createWorkerSendNode(); workerSendNode.setWorkerName(createIdentifier(getPosition(asyncSendActionNode.peerWorker()), asyncSendActionNode.peerWorker().name())); workerSendNode.expr = createExpression(asyncSendActionNode.expression()); workerSendNode.pos = getPosition(asyncSendActionNode); return workerSendNode; } @Override public BLangNode transform(WaitActionNode waitActionNode) { Node waitFutureExpr = waitActionNode.waitFutureExpr(); if (waitFutureExpr.kind() == SyntaxKind.WAIT_FIELDS_LIST) { return getWaitForAllExpr((WaitFieldsListNode) waitFutureExpr); } 
BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode(); waitExpr.pos = getPosition(waitActionNode); waitExpr.exprList = Collections.singletonList(createExpression(waitFutureExpr)); return waitExpr; } private BLangWaitForAllExpr getWaitForAllExpr(WaitFieldsListNode waitFields) { BLangWaitForAllExpr bLangWaitForAll = TreeBuilder.createWaitForAllExpressionNode(); List<BLangWaitKeyValue> exprs = new ArrayList<>(); for (Node waitField : waitFields.waitFields()) { exprs.add(getWaitForAllExpr(waitField)); } bLangWaitForAll.keyValuePairs = exprs; bLangWaitForAll.pos = getPosition(waitFields); return bLangWaitForAll; } private BLangWaitKeyValue getWaitForAllExpr(Node waitFields) { BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode(); keyValue.pos = getPosition(waitFields); if (waitFields.kind() == SyntaxKind.WAIT_FIELD) { WaitFieldNode waitFieldNode = (WaitFieldNode) waitFields; BLangIdentifier key = createIdentifier(waitFieldNode.fieldName().name()); key.setLiteral(false); keyValue.key = key; keyValue.valueExpr = createExpression(waitFieldNode.waitFutureExpr()); return keyValue; } SimpleNameReferenceNode varName = (SimpleNameReferenceNode) waitFields; BLangIdentifier key = createIdentifier(varName.name()); key.setLiteral(false); keyValue.key = key; BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode(); varRef.pos = getPosition(varName); varRef.variableName = key; varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); keyValue.keyExpr = varRef; return keyValue; } @Override public BLangNode transform(StartActionNode startActionNode) { BLangNode expression = createActionOrExpression(startActionNode.expression()); BLangInvocation invocation; if (!(expression instanceof BLangWorkerSend)) { invocation = (BLangInvocation) expression; } else { invocation = (BLangInvocation) ((BLangWorkerSend) expression).expr; expression = ((BLangWorkerSend) expression).expr; } if 
(expression.getKind() == NodeKind.INVOCATION) { BLangActionInvocation actionInvocation = (BLangActionInvocation) TreeBuilder.createActionInvocation(); actionInvocation.expr = invocation.expr; actionInvocation.pkgAlias = invocation.pkgAlias; actionInvocation.name = invocation.name; actionInvocation.argExprs = invocation.argExprs; actionInvocation.flagSet = invocation.flagSet; actionInvocation.pos = getPosition(startActionNode); invocation = actionInvocation; } invocation.async = true; invocation.annAttachments = applyAll(startActionNode.annotations()); return invocation; } @Override public BLangNode transform(TransactionStatementNode transactionStatementNode) { BLangTransaction transaction = (BLangTransaction) TreeBuilder.createTransactionNode(); BLangBlockStmt transactionBlock = (BLangBlockStmt) transactionStatementNode.blockStatement().apply(this); transactionBlock.pos = getPosition(transactionStatementNode.blockStatement()); transaction.setTransactionBody(transactionBlock); transaction.pos = getPosition(transactionStatementNode); transactionStatementNode.onFailClause().ifPresent(onFailClauseNode -> { transaction.setOnFailClause( (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this))); }); return transaction; } @Override public BLangNode transform(PositionalArgumentNode argumentNode) { return createExpression(argumentNode.expression()); } @Override public BLangNode transform(NamedArgumentNode namedArgumentNode) { BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode(); namedArg.pos = getPosition(namedArgumentNode); namedArg.name = this.createIdentifier(namedArgumentNode.argumentName().name()); namedArg.expr = createExpression(namedArgumentNode.expression()); return namedArg; } @Override public BLangNode transform(RestArgumentNode restArgumentNode) { BLangRestArgsExpression varArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode(); varArgs.pos = getPosition(restArgumentNode.ellipsis()); 
varArgs.expr = createExpression(restArgumentNode.expression()); return varArgs; } @Override public BLangNode transform(RequiredParameterNode requiredParameter) { BLangSimpleVariable simpleVar = createSimpleVar(requiredParameter.paramName(), requiredParameter.typeName(), requiredParameter.annotations()); simpleVar.pos = getPosition(requiredParameter); if (requiredParameter.paramName().isPresent()) { simpleVar.name.pos = getPosition(requiredParameter.paramName().get()); } simpleVar.flagSet.add(Flag.REQUIRED_PARAM); return simpleVar; } @Override public BLangNode transform(IncludedRecordParameterNode includedRecordParameterNode) { BLangSimpleVariable simpleVar = createSimpleVar(includedRecordParameterNode.paramName(), includedRecordParameterNode.typeName(), includedRecordParameterNode.annotations()); simpleVar.flagSet.add(INCLUDED); simpleVar.pos = getPosition(includedRecordParameterNode); if (includedRecordParameterNode.paramName().isPresent()) { simpleVar.name.pos = getPosition(includedRecordParameterNode.paramName().get()); } simpleVar.pos = trimLeft(simpleVar.pos, getPosition(includedRecordParameterNode.typeName())); return simpleVar; } @Override public BLangNode transform(DefaultableParameterNode defaultableParameter) { BLangSimpleVariable simpleVar = createSimpleVar(defaultableParameter.paramName(), defaultableParameter.typeName(), defaultableParameter.annotations()); simpleVar.setInitialExpression(createExpression(defaultableParameter.expression())); simpleVar.flagSet.add(Flag.DEFAULTABLE_PARAM); simpleVar.pos = getPosition(defaultableParameter); return simpleVar; } @Override public BLangNode transform(RestParameterNode restParameter) { BLangSimpleVariable bLSimpleVar = createSimpleVar(restParameter.paramName(), restParameter.typeName(), restParameter.annotations()); BLangArrayType bLArrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode(); bLArrayType.elemtype = bLSimpleVar.typeNode; bLArrayType.dimensions = 1; bLSimpleVar.typeNode = bLArrayType; 
bLArrayType.pos = getPosition(restParameter.typeName()); bLSimpleVar.flagSet.add(Flag.REST_PARAM); bLSimpleVar.pos = getPosition(restParameter); return bLSimpleVar; } @Override public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) { BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); nilTypeNode.pos = getPosition(optTypeDescriptor.questionMarkToken()); nilTypeNode.typeKind = TypeKind.NIL; BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); unionTypeNode.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor())); unionTypeNode.memberTypeNodes.add(nilTypeNode); unionTypeNode.nullable = true; unionTypeNode.pos = getPosition(optTypeDescriptor); return unionTypeNode; } @Override public BLangNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) { BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode(); functionTypeNode.pos = getPosition(functionTypeDescriptorNode); functionTypeNode.returnsKeywordExists = true; if (functionTypeDescriptorNode.functionSignature().isPresent()) { FunctionSignatureNode funcSignature = functionTypeDescriptorNode.functionSignature().get(); for (ParameterNode child : funcSignature.parameters()) { SimpleVariableNode param = (SimpleVariableNode) child.apply(this); if (child.kind() == SyntaxKind.REST_PARAM) { functionTypeNode.restParam = (BLangSimpleVariable) param; } else { functionTypeNode.params.add((BLangVariable) param); } } Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc(); if (retNode.isPresent()) { ReturnTypeDescriptorNode returnType = retNode.get(); functionTypeNode.returnTypeNode = createTypeNode(returnType.type()); } else { BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode(); bLValueType.pos = getPosition(funcSignature); bLValueType.typeKind = TypeKind.NIL; functionTypeNode.returnTypeNode = bLValueType; } } else { 
functionTypeNode.flagSet.add(Flag.ANY_FUNCTION); } functionTypeNode.flagSet.add(Flag.PUBLIC); for (Token token : functionTypeDescriptorNode.qualifierList()) { if (token.kind() == SyntaxKind.ISOLATED_KEYWORD) { functionTypeNode.flagSet.add(Flag.ISOLATED); } else if (token.kind() == SyntaxKind.TRANSACTIONAL_KEYWORD) { functionTypeNode.flagSet.add(Flag.TRANSACTIONAL); } } return functionTypeNode; } @Override public BLangNode transform(MapTypeDescriptorNode mapTypeDescNode) { BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = TypeKind.MAP; refType.pos = getPosition(mapTypeDescNode); BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode(); constrainedType.type = refType; constrainedType.constraint = createTypeNode(mapTypeDescNode.mapTypeParamsNode().typeNode()); constrainedType.pos = refType.pos; return constrainedType; } @Override public BLangNode transform(KeySpecifierNode keySpecifierNode) { BLangTableKeySpecifier tableKeySpecifierNode = (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode(); tableKeySpecifierNode.pos = getPosition(keySpecifierNode); for (Token field : keySpecifierNode.fieldNames()) { tableKeySpecifierNode.addFieldNameIdentifier(createIdentifier(field)); } return tableKeySpecifierNode; } @Override public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) { BLangTableKeyTypeConstraint tableKeyTypeConstraint = new BLangTableKeyTypeConstraint(); tableKeyTypeConstraint.pos = getPosition(keyTypeConstraintNode); tableKeyTypeConstraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode()); return tableKeyTypeConstraint; } @Override public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) { BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = 
TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text()); refType.pos = getPosition(tableTypeDescriptorNode); BLangTableTypeNode tableTypeNode = (BLangTableTypeNode) TreeBuilder.createTableTypeNode(); tableTypeNode.pos = getPosition(tableTypeDescriptorNode); tableTypeNode.type = refType; tableTypeNode.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode()); if (tableTypeDescriptorNode.keyConstraintNode().isPresent()) { Node constraintNode = tableTypeDescriptorNode.keyConstraintNode().get(); if (constraintNode.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) { tableTypeNode.tableKeyTypeConstraint = (BLangTableKeyTypeConstraint) constraintNode.apply(this); } else if (constraintNode.kind() == SyntaxKind.KEY_SPECIFIER) { tableTypeNode.tableKeySpecifier = (BLangTableKeySpecifier) constraintNode.apply(this); } } tableTypeNode.isTypeInlineDefined = checkIfAnonymous(tableTypeDescriptorNode); return tableTypeNode; } @Override public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) { BLangUserDefinedType bLUserDefinedType = new BLangUserDefinedType(); bLUserDefinedType.pos = getPosition(simpleNameRefNode); bLUserDefinedType.typeName = createIdentifier(simpleNameRefNode.name()); bLUserDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); return bLUserDefinedType; } @Override public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode(); varRef.pos = getPosition(qualifiedNameReferenceNode); varRef.variableName = createIdentifier(qualifiedNameReferenceNode.identifier()); varRef.pkgAlias = createIdentifier(qualifiedNameReferenceNode.modulePrefix()); return varRef; } @Override public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) { BLangXMLProcInsLiteral xmlProcInsLiteral = (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode(); if 
(xmlProcessingInstruction.data().isEmpty()) { BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = getPosition(xmlProcessingInstruction); xmlProcInsLiteral.dataFragments.add(emptyLiteral); } else { for (Node dataNode : xmlProcessingInstruction.data()) { xmlProcInsLiteral.dataFragments.add(createExpression(dataNode)); } } XMLNameNode target = xmlProcessingInstruction.target(); if (target.kind() == SyntaxKind.XML_SIMPLE_NAME) { xmlProcInsLiteral.target = createSimpleLiteral(((XMLSimpleNameNode) target).name()); } else { xmlProcInsLiteral.target = createSimpleLiteral(((XMLQualifiedNameNode) target).prefix()); } xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction); return xmlProcInsLiteral; } @Override public BLangNode transform(XMLComment xmlComment) { BLangXMLCommentLiteral xmlCommentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode(); Location pos = getPosition(xmlComment); if (xmlComment.content().isEmpty()) { BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = pos; xmlCommentLiteral.textFragments.add(emptyLiteral); } else { for (Node commentNode : xmlComment.content()) { xmlCommentLiteral.textFragments.add(createExpression(commentNode)); } } xmlCommentLiteral.pos = pos; return xmlCommentLiteral; } @Override public BLangNode transform(XMLElementNode xmlElementNode) { BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode(); xmlElement.startTagName = createExpression(xmlElementNode.startTag()); xmlElement.endTagName = createExpression(xmlElementNode.endTag()); for (Node node : xmlElementNode.content()) { if (node.kind() == SyntaxKind.XML_TEXT) { xmlElement.children.add(createSimpleLiteral(((XMLTextNode) node).content())); continue; } xmlElement.children.add(createExpression(node)); } for (XMLAttributeNode attribute : xmlElementNode.startTag().attributes()) { xmlElement.attributes.add((BLangXMLAttribute) attribute.apply(this)); } xmlElement.pos = 
getPosition(xmlElementNode); xmlElement.isRoot = true; return xmlElement; } @Override public BLangNode transform(XMLAttributeNode xmlAttributeNode) { BLangXMLAttribute xmlAttribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode(); xmlAttribute.value = (BLangXMLQuotedString) xmlAttributeNode.value().apply(this); xmlAttribute.name = createExpression(xmlAttributeNode.attributeName()); xmlAttribute.pos = getPosition(xmlAttributeNode); return xmlAttribute; } @Override public BLangNode transform(ByteArrayLiteralNode byteArrayLiteralNode) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.pos = getPosition(byteArrayLiteralNode); literal.setBType(symTable.getTypeFromTag(TypeTags.BYTE_ARRAY)); literal.getBType().tag = TypeTags.BYTE_ARRAY; literal.value = getValueFromByteArrayNode(byteArrayLiteralNode); literal.originalValue = String.valueOf(literal.value); return literal; } @Override public BLangNode transform(XMLAttributeValue xmlAttributeValue) { BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode(); quotedString.pos = getPosition(xmlAttributeValue); if (xmlAttributeValue.startQuote().kind() == SyntaxKind.SINGLE_QUOTE_TOKEN) { quotedString.quoteType = QuoteType.SINGLE_QUOTE; } else { quotedString.quoteType = QuoteType.DOUBLE_QUOTE; } if (xmlAttributeValue.value().isEmpty()) { BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = getPosition(xmlAttributeValue); quotedString.textFragments.add(emptyLiteral); } else if (xmlAttributeValue.value().size() == 1 && xmlAttributeValue.value().get(0).kind() == SyntaxKind.INTERPOLATION) { quotedString.textFragments.add(createExpression(xmlAttributeValue.value().get(0))); BLangLiteral emptyLiteral = createEmptyLiteral(); emptyLiteral.pos = getPosition(xmlAttributeValue); quotedString.textFragments.add(emptyLiteral); } else { for (Node value : xmlAttributeValue.value()) { if (value.kind() == SyntaxKind.XML_TEXT_CONTENT) { Token 
token = (Token) value; String normalizedValue = XmlFactory.XMLTextUnescape.unescape(token.text()); quotedString.textFragments.add(createStringLiteral(normalizedValue, getPosition(value))); } else { quotedString.textFragments.add(createExpression(value)); } } } return quotedString; } @Override public BLangNode transform(XMLStartTagNode startTagNode) { return startTagNode.name().apply(this); } @Override public BLangNode transform(XMLEndTagNode endTagNode) { return endTagNode.name().apply(this); } @Override public BLangNode transform(XMLTextNode xmlTextNode) { return createExpression(xmlTextNode.content()); } private BLangNode createXMLEmptyLiteral(TemplateExpressionNode expressionNode) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.pos = getPosition(expressionNode); xmlTextLiteral.textFragments.add(createEmptyStringLiteral(xmlTextLiteral.pos)); return xmlTextLiteral; } private BLangNode createXMLTextLiteral(List<Node> expressionNode) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.pos = getPosition(expressionNode.get(0)); for (Node node : expressionNode) { xmlTextLiteral.textFragments.add(createExpression(node)); } xmlTextLiteral.textFragments.add(createEmptyStringLiteral(xmlTextLiteral.pos)); return xmlTextLiteral; } private BLangNode createXMLTextLiteral(Node expressionNode) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.pos = getPosition(expressionNode); xmlTextLiteral.textFragments.add(createExpression(expressionNode)); return xmlTextLiteral; } @Override public BLangNode transform(XMLNamespaceDeclarationNode xmlnsDeclNode) { BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode(); BLangIdentifier prefixIdentifier = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null)); BLangExpression namespaceUri = createExpression(xmlnsDeclNode.namespaceuri()); 
xmlns.namespaceURI = namespaceUri; xmlns.prefix = prefixIdentifier; xmlns.pos = getPosition(xmlnsDeclNode); BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode(); xmlnsStmt.xmlnsDecl = xmlns; xmlnsStmt.pos = getPosition(xmlnsDeclNode); return xmlnsStmt; } @Override public BLangNode transform(ModuleXMLNamespaceDeclarationNode xmlnsDeclNode) { BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode(); BLangIdentifier prefixIdentifier = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null)); BLangExpression namespaceUri = createExpression(xmlnsDeclNode.namespaceuri()); xmlns.namespaceURI = namespaceUri; xmlns.prefix = prefixIdentifier; xmlns.pos = getPosition(xmlnsDeclNode); return xmlns; } @Override public BLangNode transform(XMLQualifiedNameNode xmlQualifiedNameNode) { BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode(); xmlName.localname = createIdentifier(getPosition(xmlQualifiedNameNode.name()), xmlQualifiedNameNode.name().name()); xmlName.prefix = createIdentifier(getPosition(xmlQualifiedNameNode.prefix()), xmlQualifiedNameNode.prefix().name()); xmlName.pos = getPosition(xmlQualifiedNameNode); return xmlName; } @Override public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) { BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode(); xmlName.localname = createIdentifier(xmlSimpleNameNode.name()); xmlName.prefix = createIdentifier(null, ""); xmlName.pos = getPosition(xmlSimpleNameNode); return xmlName; } @Override public BLangNode transform(XMLEmptyElementNode xMLEmptyElementNode) { BLangXMLElementLiteral xmlEmptyElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode(); xmlEmptyElement.startTagName = createExpression(xMLEmptyElementNode.name()); for (XMLAttributeNode attribute : xMLEmptyElementNode.attributes()) { xmlEmptyElement.attributes.add((BLangXMLAttribute) attribute.apply(this)); } xmlEmptyElement.pos = 
getPosition(xMLEmptyElementNode); return xmlEmptyElement; } @Override public BLangNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) { BLangInvocation.BLangActionInvocation bLangActionInvocation = (BLangInvocation.BLangActionInvocation) TreeBuilder.createActionInvocation(); bLangActionInvocation.expr = createExpression(remoteMethodCallActionNode.expression()); bLangActionInvocation.argExprs = applyAll(remoteMethodCallActionNode.arguments()); BLangNameReference nameReference = createBLangNameReference(remoteMethodCallActionNode.methodName().name()); bLangActionInvocation.name = (BLangIdentifier) nameReference.name; bLangActionInvocation.pkgAlias = (BLangIdentifier) nameReference.pkgAlias; bLangActionInvocation.pos = getPosition(remoteMethodCallActionNode); return bLangActionInvocation; } @Override public BLangNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) { BLangType constraint, error = null; Location pos = getPosition(streamTypeDescriptorNode); Optional<Node> paramsNode = streamTypeDescriptorNode.streamTypeParamsNode(); boolean hasConstraint = paramsNode.isPresent(); if (!hasConstraint) { constraint = addValueType(pos, TypeKind.ANY); } else { StreamTypeParamsNode params = (StreamTypeParamsNode) paramsNode.get(); if (params.rightTypeDescNode().isPresent()) { error = createTypeNode(params.rightTypeDescNode().get()); } constraint = createTypeNode(params.leftTypeDescNode()); } BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode(); refType.typeKind = TypeKind.STREAM; refType.pos = pos; BLangStreamType streamType = (BLangStreamType) TreeBuilder.createStreamTypeNode(); streamType.type = refType; streamType.constraint = constraint; streamType.error = error; streamType.pos = pos; return streamType; } @Override public BLangNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) { int dimensions = 1; List<BLangExpression> sizes = new ArrayList<>(); Location position = 
getPosition(arrayTypeDescriptorNode);
// Walk nested array type descriptors (e.g. T[2][3]) collecting one size entry per dimension.
while (true) {
    if (arrayTypeDescriptorNode.arrayLength().isEmpty()) {
        // "T[]" — open (unsized) dimension.
        sizes.add(new BLangLiteral(OPEN_ARRAY_INDICATOR, symTable.intType));
    } else {
        Node keyExpr = arrayTypeDescriptorNode.arrayLength().get();
        if (keyExpr.kind() == SyntaxKind.NUMERIC_LITERAL) {
            Token literalToken = ((BasicLiteralNode) keyExpr).literalToken();
            if (literalToken.kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
                // NOTE(review): Integer.parseInt limits a declared dimension to int range and
                // throws NumberFormatException for larger literals — confirm lengths are
                // validated upstream.
                sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text()), symTable.intType));
            } else {
                // Hex integer literal length.
                // NOTE(review): Integer.parseInt(text, 16) fails if the token text still carries
                // the "0x"/"0X" prefix — verify the lexer strips the prefix, or strip it here.
                sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text(), 16), symTable.intType));
            }
        } else if (keyExpr.kind() == SyntaxKind.ASTERISK_LITERAL) {
            // "T[*]" — length inferred from context.
            sizes.add(new BLangLiteral(INFERRED_ARRAY_INDICATOR, symTable.intType));
        } else {
            // Constant-reference (or other expression) length.
            sizes.add(createExpression(keyExpr));
        }
    }
    if (arrayTypeDescriptorNode.memberTypeDesc().kind() != SyntaxKind.ARRAY_TYPE_DESC) {
        break;
    }
    // Descend into the nested array descriptor for the next dimension.
    arrayTypeDescriptorNode = (ArrayTypeDescriptorNode) arrayTypeDescriptorNode.memberTypeDesc();
    dimensions++;
}
BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
arrayTypeNode.pos = position;
arrayTypeNode.elemtype = createTypeNode(arrayTypeDescriptorNode.memberTypeDesc());
arrayTypeNode.dimensions = dimensions;
arrayTypeNode.sizes = sizes.toArray(new BLangExpression[0]);
return arrayTypeNode;
}

/**
 * Desugars an enum declaration into one constant per member (pushed to the top level)
 * plus a type definition whose type node is the union of the member types.
 */
public BLangNode transform(EnumDeclarationNode enumDeclarationNode) {
    // NOTE(review): boxed Boolean is unnecessary here; primitive boolean would avoid autoboxing
    // (transformEnumMember's Boolean parameter keeps the call compatible either way).
    Boolean publicQualifier = false;
    if (enumDeclarationNode.qualifier().isPresent() && enumDeclarationNode.qualifier().get().kind()
            == SyntaxKind.PUBLIC_KEYWORD) {
        publicQualifier = true;
    }
    for (Node member : enumDeclarationNode.enumMemberList()) {
        EnumMemberNode enumMember = (EnumMemberNode) member;
        if (enumMember.identifier().isMissing()) {
            // Skip members whose identifier was lost to error recovery.
            continue;
        }
        addToTop(transformEnumMember(enumMember, publicQualifier));
    }
    BLangTypeDefinition bLangTypeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    if (publicQualifier) {
        bLangTypeDefinition.flagSet.add(Flag.PUBLIC);
    }
bLangTypeDefinition.flagSet.add(Flag.ENUM); bLangTypeDefinition.setName((BLangIdentifier) transform(enumDeclarationNode.identifier())); bLangTypeDefinition.pos = getPosition(enumDeclarationNode); BLangUnionTypeNode bLangUnionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode(); bLangUnionTypeNode.pos = bLangTypeDefinition.pos; for (Node member : enumDeclarationNode.enumMemberList()) { Node enumMemberIdentifier = ((EnumMemberNode) member).identifier(); if (enumMemberIdentifier.isMissing()) { continue; } bLangUnionTypeNode.memberTypeNodes.add(createTypeNode(enumMemberIdentifier)); } Collections.reverse(bLangUnionTypeNode.memberTypeNodes); bLangTypeDefinition.setTypeNode(bLangUnionTypeNode); bLangTypeDefinition.annAttachments = applyAll(getAnnotations(enumDeclarationNode.metadata())); bLangTypeDefinition.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(enumDeclarationNode.metadata())); return bLangTypeDefinition; } public BLangConstant transformEnumMember(EnumMemberNode member, Boolean publicQualifier) { BLangConstant bLangConstant = (BLangConstant) TreeBuilder.createConstantNode(); bLangConstant.pos = getPosition(member); bLangConstant.flagSet.add(Flag.CONSTANT); bLangConstant.flagSet.add(Flag.ENUM_MEMBER); if (publicQualifier) { bLangConstant.flagSet.add(Flag.PUBLIC); } bLangConstant.annAttachments = applyAll(getAnnotations(member.metadata())); bLangConstant.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(getDocumentationString(member.metadata())); bLangConstant.setName((BLangIdentifier) transform(member.identifier())); BLangExpression deepLiteral; if (member.constExprNode().isPresent()) { BLangExpression expression = createExpression(member.constExprNode().orElse(null)); bLangConstant.setInitialExpression(expression); deepLiteral = createExpression(member.constExprNode().orElse(null)); } else { BLangLiteral literal = createSimpleLiteral(member.identifier()); 
bLangConstant.setInitialExpression(literal);
deepLiteral = createSimpleLiteral(member.identifier());
}
// Enum members are modelled as string-typed constants.
BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
typeNode.pos = symTable.builtinPos;
typeNode.typeKind = TypeKind.STRING;
bLangConstant.setTypeNode(typeNode);
// Attach a finite (singleton) type definition holding the member's value so the member
// can participate in the enum's union type.
if (deepLiteral instanceof BLangLiteral) {
    BLangLiteral literal = (BLangLiteral) deepLiteral;
    // NOTE(review): dereferences literal.originalValue without a null check and compares
    // with equals("") — confirm createSimpleLiteral always populates originalValue.
    if (!literal.originalValue.equals("")) {
        BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        literal.originalValue = null;
        typeNodeAssociated.addValue(deepLiteral);
        bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
    } else {
        bLangConstant.associatedTypeDefinition = null;
    }
} else {
    // Non-literal constant expression: still wrap it in a finite type node.
    BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
    typeNodeAssociated.addValue(deepLiteral);
    bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
}
return bLangConstant;
}

/**
 * Builds a BLangQueryExpr from the query pipeline: from-clause, intermediate clauses,
 * select-clause and the optional on-conflict clause, in source order.
 */
@Override
public BLangNode transform(QueryExpressionNode queryExprNode) {
    BLangQueryExpr queryExpr = (BLangQueryExpr) TreeBuilder.createQueryExpressionNode();
    queryExpr.pos = getPosition(queryExprNode);
    BLangFromClause fromClause = (BLangFromClause) queryExprNode.queryPipeline().fromClause().apply(this);
    queryExpr.queryClauseList.add(fromClause);
    for (Node clauseNode : queryExprNode.queryPipeline().intermediateClauses()) {
        queryExpr.queryClauseList.add(clauseNode.apply(this));
    }
    BLangSelectClause selectClause = (BLangSelectClause) queryExprNode.selectClause().apply(this);
    queryExpr.queryClauseList.add(selectClause);
    Optional<OnConflictClauseNode> onConflict = queryExprNode.onConflictClause();
    onConflict.ifPresent(onConflictClauseNode -> queryExpr.queryClauseList.add(onConflictClauseNode.apply(this)));
    boolean isTable = false;
    boolean isStream = false;
    // Optional "table key(...)" / "stream" construct-type prefix of the query expression.
    Optional<QueryConstructTypeNode> optionalQueryConstructTypeNode = queryExprNode.queryConstructType();
    if
(optionalQueryConstructTypeNode.isPresent()) { QueryConstructTypeNode queryConstructTypeNode = optionalQueryConstructTypeNode.get(); isTable = queryConstructTypeNode.keyword().kind() == SyntaxKind.TABLE_KEYWORD; isStream = queryConstructTypeNode.keyword().kind() == SyntaxKind.STREAM_KEYWORD; if (queryConstructTypeNode.keySpecifier().isPresent()) { for (IdentifierToken fieldNameNode : queryConstructTypeNode.keySpecifier().get().fieldNames()) { queryExpr.fieldNameIdentifierList.add(createIdentifier(getPosition(fieldNameNode), fieldNameNode)); } } } queryExpr.isStream = isStream; queryExpr.isTable = isTable; return queryExpr; } public BLangNode transform(OnFailClauseNode onFailClauseNode) { Location pos = getPosition(onFailClauseNode); BLangSimpleVariableDef variableDefinitionNode = (BLangSimpleVariableDef) TreeBuilder. createSimpleVariableDefinitionNode(); BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode(); boolean isDeclaredWithVar = onFailClauseNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC; var.isDeclaredWithVar = isDeclaredWithVar; if (!isDeclaredWithVar) { var.setTypeNode(createTypeNode(onFailClauseNode.typeDescriptor())); } var.pos = getPosition(onFailClauseNode); var.setName(this.createIdentifier(onFailClauseNode.failErrorName())); var.name.pos = getPosition(onFailClauseNode.failErrorName()); variableDefinitionNode.setVariable(var); variableDefinitionNode.pos = var.name.pos; BLangOnFailClause onFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode(); onFailClause.pos = pos; onFailClause.isDeclaredWithVar = isDeclaredWithVar; markVariableWithFlag(variableDefinitionNode.getVariable(), Flag.FINAL); onFailClause.variableDefinitionNode = variableDefinitionNode; BLangBlockStmt blockNode = (BLangBlockStmt) transform(onFailClauseNode.blockStatement()); blockNode.pos = getPosition(onFailClauseNode); onFailClause.body = blockNode; return onFailClause; } @Override public BLangNode transform(LetClauseNode 
letClauseNode) {
    BLangLetClause bLLetClause = (BLangLetClause) TreeBuilder.createLetClauseNode();
    bLLetClause.pos = getPosition(letClauseNode);
    List<BLangLetVariable> letVars = new ArrayList<>();
    for (LetVariableDeclarationNode letVarDeclr : letClauseNode.letVarDeclarations()) {
        BLangLetVariable letVar = createLetVariable(letVarDeclr);
        // Let variables are implicitly final.
        letVar.definitionNode.getVariable().addFlag(Flag.FINAL);
        letVars.add(letVar);
    }
    if (!letVars.isEmpty()) {
        bLLetClause.letVarDeclarations = letVars;
    }
    return bLLetClause;
}

/**
 * Transforms a from clause: sets the iterated collection expression and the
 * typed-binding-pattern variable definition.
 */
@Override
public BLangNode transform(FromClauseNode fromClauseNode) {
    BLangFromClause fromClause = (BLangFromClause) TreeBuilder.createFromClauseNode();
    fromClause.pos = getPosition(fromClauseNode);
    fromClause.collection = createExpression(fromClauseNode.expression());
    TypedBindingPatternNode bindingPatternNode = fromClauseNode.typedBindingPattern();
    fromClause.variableDefinitionNode = createBLangVarDef(getPosition(bindingPatternNode), bindingPatternNode,
            Optional.empty(), Optional.empty());
    boolean isDeclaredWithVar = bindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    fromClause.isDeclaredWithVar = isDeclaredWithVar;
    return fromClause;
}

/** Transforms a where clause into a {@code BLangWhereClause} holding its filter expression. */
@Override
public BLangNode transform(WhereClauseNode whereClauseNode) {
    BLangWhereClause whereClause = (BLangWhereClause) TreeBuilder.createWhereClauseNode();
    whereClause.pos = getPosition(whereClauseNode);
    whereClause.expression = createExpression(whereClauseNode.expression());
    return whereClause;
}

/** Transforms a select clause into a {@code BLangSelectClause} holding its projection expression. */
@Override
public BLangNode transform(SelectClauseNode selectClauseNode) {
    BLangSelectClause selectClause = (BLangSelectClause) TreeBuilder.createSelectClauseNode();
    selectClause.pos = getPosition(selectClauseNode);
    selectClause.expression = createExpression(selectClauseNode.expression());
    return selectClause;
}

/** Transforms an on-conflict clause into a {@code BLangOnConflictClause}. */
@Override
public BLangNode transform(OnConflictClauseNode onConflictClauseNode) {
    BLangOnConflictClause onConflictClause = (BLangOnConflictClause) TreeBuilder.createOnConflictClauseNode();
    onConflictClause.pos = getPosition(onConflictClauseNode);
    onConflictClause.expression = createExpression(onConflictClauseNode.expression());
    return onConflictClause;
}

/** Transforms a limit clause into a {@code BLangLimitClause}. */
@Override
public BLangNode transform(LimitClauseNode limitClauseNode) {
    BLangLimitClause selectClause = (BLangLimitClause) TreeBuilder.createLimitClauseNode();
    selectClause.pos = getPosition(limitClauseNode);
    selectClause.expression = createExpression(limitClauseNode.expression());
    return selectClause;
}

/** Transforms an on clause (join condition) into a {@code BLangOnClause} with lhs/rhs expressions. */
@Override
public BLangNode transform(OnClauseNode onClauseNode) {
    BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    onClause.pos = getPosition(onClauseNode);
    onClause.lhsExpr = createExpression(onClauseNode.lhsExpression());
    onClause.rhsExpr = createExpression(onClauseNode.rhsExpression());
    return onClause;
}

/**
 * Transforms a join clause: binding-pattern variable, joined collection, outer-join
 * flag, and the embedded on-condition (including the `equals` keyword position when
 * it is present in source).
 */
@Override
public BLangNode transform(JoinClauseNode joinClauseNode) {
    BLangJoinClause joinClause = (BLangJoinClause) TreeBuilder.createJoinClauseNode();
    joinClause.pos = getPosition(joinClauseNode);
    TypedBindingPatternNode typedBindingPattern = joinClauseNode.typedBindingPattern();
    joinClause.variableDefinitionNode = createBLangVarDef(getPosition(joinClauseNode),
            typedBindingPattern, Optional.empty(), Optional.empty());
    joinClause.collection = createExpression(joinClauseNode.expression());
    joinClause.isDeclaredWithVar = typedBindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    joinClause.isOuterJoin = joinClauseNode.outerKeyword().isPresent();
    OnClauseNode onClauseNode = joinClauseNode.joinOnCondition();
    BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    onClause.pos = getPosition(onClauseNode);
    // Record the `equals` keyword position only when the token was actually present.
    if (!onClauseNode.equalsKeyword().isMissing()) {
        onClause.equalsKeywordPos = getPosition(onClauseNode.equalsKeyword());
    }
    onClause.lhsExpr = createExpression(onClauseNode.lhsExpression());
    onClause.rhsExpr = createExpression(onClauseNode.rhsExpression());
    joinClause.onClause = onClause;
    return joinClause;
}

/** Transforms an order-by clause by converting each order key. */
@Override
public BLangNode transform(OrderByClauseNode orderByClauseNode) {
    BLangOrderByClause orderByClause = (BLangOrderByClause) TreeBuilder.createOrderByClauseNode();
    orderByClause.pos = getPosition(orderByClauseNode);
    for (OrderKeyNode orderKeyNode : orderByClauseNode.orderKey()) {
        orderByClause.addOrderKey(createOrderKey(orderKeyNode));
    }
    return orderByClause;
}

/**
 * Builds a {@code BLangOrderKey} for one order-by key; direction defaults to
 * ascending unless the source says {@code descending}.
 */
public BLangOrderKey createOrderKey(OrderKeyNode orderKeyNode) {
    BLangOrderKey orderKey = (BLangOrderKey) TreeBuilder.createOrderKeyNode();
    orderKey.pos = getPosition(orderKeyNode);
    orderKey.expression = createExpression(orderKeyNode.expression());
    if (orderKeyNode.orderDirection().isPresent()
            && orderKeyNode.orderDirection().get().text().equals("descending")) {
        orderKey.isAscending = false;
    } else {
        orderKey.isAscending = true;
    }
    return orderKey;
}

/**
 * Transforms an intersection type descriptor. If either side is already an
 * intersection node its constituent list is extended in place (lhs constituents are
 * prepended when merging into the rhs), otherwise a fresh intersection node is built.
 */
@Override
public BLangNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {
    BLangType lhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.leftTypeDesc());
    BLangType rhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.rightTypeDesc());
    BLangIntersectionTypeNode intersectionType;
    if (rhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        intersectionType = (BLangIntersectionTypeNode) rhsType;
        intersectionType.constituentTypeNodes.add(0, lhsType);
    } else if (lhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        intersectionType = (BLangIntersectionTypeNode) lhsType;
        intersectionType.constituentTypeNodes.add(rhsType);
    } else {
        intersectionType = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode();
        intersectionType.constituentTypeNodes.add(lhsType);
        intersectionType.constituentTypeNodes.add(rhsType);
    }
    intersectionType.pos = getPosition(intersectionTypeDescriptorNode);
    return intersectionType;
}

/** Transforms an inferred typedesc default (`<>`) into its BLang node. */
@Override
public BLangNode transform(InferredTypedescDefaultNode inferDefaultValueNode) {
    BLangInferredTypedescDefaultNode inferTypedescExpr = (BLangInferredTypedescDefaultNode)
            TreeBuilder.createInferTypedescExpressionNode();
    inferTypedescExpr.pos = getPosition(inferDefaultValueNode);
    return inferTypedescExpr;
}

/** Fallback for syntax nodes with no dedicated transform — always an error. */
@Override
protected BLangNode transformSyntaxNode(Node node) {
    throw new RuntimeException("Node not supported: " + node.getClass().getSimpleName());
}

/**
 * Transforms a service declaration. The service body is lowered to an anonymous
 * class definition (hoisted to top level), and a {@code BLangService} is built that
 * references that class, a synthesized service variable initialized via the class's
 * generated constructor invocation, the attach-point expressions, and the absolute
 * resource path (or a string-literal service name).
 */
@Override
public BLangNode transform(ServiceDeclarationNode serviceDeclarationNode) {
    Location pos = getPositionWithoutMetadata(serviceDeclarationNode);
    BLangClassDefinition anonClassDef = transformObjectCtorExpressionBody(serviceDeclarationNode.members());
    anonClassDef.isServiceDecl = true;
    anonClassDef.pos = pos;
    anonClassDef.flagSet.add(SERVICE);
    setClassQualifiers(serviceDeclarationNode.qualifiers(), anonClassDef);

    List<IdentifierNode> absResourcePathPath = new ArrayList<>();
    NodeList<Node> pathList = serviceDeclarationNode.absoluteResourcePath();
    BLangLiteral serviceNameLiteral = null;
    // A single string literal is a service *name*; otherwise the tokens form the
    // absolute resource path.
    if (pathList.size() == 1 && pathList.get(0).kind() == SyntaxKind.STRING_LITERAL) {
        serviceNameLiteral = (BLangLiteral) createExpression(pathList.get(0));
    } else {
        for (var token : pathList) {
            String text = ((Token) token).text();
            // NOTE(review): both branches below do the same thing; the net effect is
            // "add every token except a '/' that is not the sole path element".
            // Could be collapsed to one condition — left as-is to preserve behavior.
            if (pathList.size() == 1 && text.equals("/")) {
                absResourcePathPath.add(createIdentifier((Token) token));
            } else if (!text.equals("/")) {
                absResourcePathPath.add(createIdentifier((Token) token));
            }
        }
    }

    // Give the hoisted anonymous class a generated name and public visibility.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClassDef.setName(anonTypeGenName);
    anonClassDef.flagSet.add(Flag.PUBLIC);

    Optional<TypeDescriptorNode> typeReference = serviceDeclarationNode.typeDescriptor();
    typeReference.ifPresent(typeReferenceNode -> {
        BLangType typeNode = createTypeNode(typeReferenceNode);
        anonClassDef.typeRefs.add(typeNode);
    });

    anonClassDef.annAttachments = applyAll(getAnnotations(serviceDeclarationNode.metadata()));
    anonClassDef.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(serviceDeclarationNode.metadata()));

    addToTop(anonClassDef);

    // Build `new <genName>()` as the service variable's initializer.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClassDef.name);
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    // NOTE(review): invocationNode.argExprs has had nothing added at this point, so
    // this addAll copies an empty list — verify before relying on it.
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;

    BLangSimpleVariable serviceVariable = createServiceVariable(pos, anonClassDef, initNode);

    // Attach-point expressions (listeners etc.).
    List<BLangExpression> exprs = new ArrayList<>();
    for (var exp : serviceDeclarationNode.expressions()) {
        exprs.add(createExpression(exp));
    }

    BLangService service = (BLangService) TreeBuilder.createServiceNode();
    service.serviceVariable = serviceVariable;
    service.attachedExprs = exprs;
    service.serviceClass = anonClassDef;
    service.absoluteResourcePath = absResourcePathPath;
    service.serviceNameLiteral = serviceNameLiteral;
    service.annAttachments = anonClassDef.annAttachments;
    service.pos = pos;
    service.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
    return service;
}

/**
 * Creates the synthesized (internal) variable holding the service instance,
 * typed by the anonymous service class and initialized with the given init node.
 */
private BLangSimpleVariable createServiceVariable(Location pos, BLangClassDefinition annonClassDef,
                                                  BLangTypeInit initNode) {
    BLangUserDefinedType typeName = createUserDefinedType(pos,
            (BLangIdentifier) TreeBuilder.createIdentifierNode(), annonClassDef.name);
    BLangSimpleVariable serviceInstance = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    serviceInstance.typeNode = typeName;
    String serviceVarName = anonymousModelHelper.getNextAnonymousServiceVarKey(packageID);
    serviceInstance.name = createIdentifier(pos, serviceVarName);
    serviceInstance.expr = initNode;
    serviceInstance.internal = true;
    return serviceInstance;
}

/**
 * Transforms a class definition: maps members to attached functions (the first
 * user-defined `init` becomes the class init function), fields, and type references.
 */
@Override
public BLangNode transform(ClassDefinitionNode classDefinitionNode) {
    BLangClassDefinition blangClass = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    blangClass.pos = getPositionWithoutMetadata(classDefinitionNode);
    blangClass.annAttachments = applyAll(getAnnotations(classDefinitionNode.metadata()));
    BLangIdentifier identifierNode = createIdentifier(classDefinitionNode.className());
    blangClass.setName(identifierNode);
    blangClass.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(classDefinitionNode.metadata()));
    classDefinitionNode.visibilityQualifier().ifPresent(visibilityQual -> {
        if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            blangClass.flagSet.add(Flag.PUBLIC);
        }
    });
    setClassQualifiers(classDefinitionNode.classTypeQualifiers(), blangClass);

    NodeList<Node> members = classDefinitionNode.members();
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // Only the first `init` becomes the class init function; duplicates are
                // added as ordinary functions (presumably reported later — confirm).
                if (blangClass.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    blangClass.initFunction = bLangFunction;
                } else {
                    blangClass.addFunction(bLangFunction);
                }
            } else {
                blangClass.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            blangClass.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            // Type-inclusion (`*T;`) member.
            blangClass.addTypeReference((BLangType) bLangNode);
        }
    }
    return blangClass;
}

/**
 * Transforms a retry statement. A `retry transaction { ... }` body yields a
 * {@code BLangRetryTransaction}; a plain block body yields a {@code BLangRetry}
 * with an optional on-fail clause.
 */
@Override
public BLangNode transform(RetryStatementNode retryStatementNode) {
    BLangRetrySpec retrySpec = createRetrySpec(retryStatementNode);
    Location pos = getPosition(retryStatementNode);
    StatementNode retryBody = retryStatementNode.retryBody();

    if (retryBody.kind() == SyntaxKind.TRANSACTION_STATEMENT) {
        BLangRetryTransaction retryTransaction = (BLangRetryTransaction) TreeBuilder.createRetryTransactionNode();
        retryTransaction.pos = pos;
        retryTransaction.setRetrySpec(retrySpec);
        retryTransaction.setTransaction((BLangTransaction) retryBody.apply(this));
        return retryTransaction;
    }

    BLangRetry retryNode = (BLangRetry) TreeBuilder.createRetryNode();
    retryNode.pos = pos;
    retryNode.setRetrySpec(retrySpec);
    BLangBlockStmt retryBlock = (BLangBlockStmt) retryBody.apply(this);
    retryNode.setRetryBody(retryBlock);
    retryStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        retryNode.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return retryNode;
}

/**
 * Builds the retry spec: optional retry-manager type parameter and optional
 * argument list. The spec position covers whichever of those are present,
 * falling back to the whole retry statement.
 */
private BLangRetrySpec createRetrySpec(RetryStatementNode retryStatementNode) {
    BLangRetrySpec retrySpec = (BLangRetrySpec) TreeBuilder.createRetrySpecNode();
    if (retryStatementNode.typeParameter().isPresent()) {
        TypeParameterNode typeParam = retryStatementNode.typeParameter().get();
        retrySpec.retryManagerType = createTypeNode(typeParam.typeNode());
        retrySpec.pos = getPosition(typeParam);
    }

    if (retryStatementNode.arguments().isPresent()) {
        ParenthesizedArgList arg = retryStatementNode.arguments().get();
        // Position spans type parameter + args when both exist.
        if (retryStatementNode.typeParameter().isPresent()) {
            retrySpec.pos = getPosition(retryStatementNode.typeParameter().get(), arg);
        } else {
            retrySpec.pos = getPosition(arg);
        }
        for (Node argNode : arg.arguments()) {
            retrySpec.argExprs.add(createExpression(argNode));
        }
    }

    if (retrySpec.pos == null) {
        retrySpec.pos = getPosition(retryStatementNode);
    }
    return retrySpec;
}

/** Transforms a `transactional` expression. */
@Override
public BLangNode transform(TransactionalExpressionNode transactionalExpressionNode) {
    BLangTransactionalExpr transactionalExpr = TreeBuilder.createTransactionalExpressionNode();
    transactionalExpr.pos = getPosition(transactionalExpressionNode);
    return transactionalExpr;
}

/**
 * Transforms an XML filter expression (`x.<name-pattern>`) into an element access
 * with one filter per name pattern.
 */
@Override
public BLangNode transform(XMLFilterExpressionNode xmlFilterExpressionNode) {
    List<BLangXMLElementFilter> filters = new ArrayList<>();

    XMLNamePatternChainingNode xmlNamePatternChainingNode = xmlFilterExpressionNode.xmlPatternChain();
    for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) {
        filters.add(createXMLElementFilter(node));
    }

    BLangExpression expr = createExpression(xmlFilterExpressionNode.expression());
    BLangXMLElementAccess elementAccess =
            new BLangXMLElementAccess(getPosition(xmlFilterExpressionNode), null, expr, filters);
    return elementAccess;
}

/**
 * Transforms an XML step expression (`x/*`, `x/<p>`, `x/**/<p>` …) into a
 * navigation access. {@code starCount} encodes the step kind and is mapped via
 * {@code XMLNavigationAccess.NavAccessType.fromInt}.
 */
@Override
public BLangNode transform(XMLStepExpressionNode xmlStepExpressionNode) {
    List<BLangXMLElementFilter> filters = new ArrayList<>();
    int starCount = 0;
    if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.SLASH_ASTERISK_TOKEN) {
        starCount = 1;
    } else if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.XML_NAME_PATTERN_CHAIN) {
        XMLNamePatternChainingNode xmlNamePatternChainingNode =
                (XMLNamePatternChainingNode) xmlStepExpressionNode.xmlStepStart();
        for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) {
            filters.add(createXMLElementFilter(node));
        }
        switch (xmlNamePatternChainingNode.startToken().kind()) {
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                starCount = 2;
                break;
            case SLASH_ASTERISK_TOKEN:
                starCount = 1;
                break;
        }
    }

    BLangExpression expr = createExpression(xmlStepExpressionNode.expression());
    BLangXMLNavigationAccess xmlNavigationAccess = new BLangXMLNavigationAccess(
            getPosition(xmlStepExpressionNode), null, expr, filters,
            XMLNavigationAccess.NavAccessType.fromInt(starCount), null);
    return xmlNavigationAccess;
}

/**
 * Transforms a match statement: the matched expression is shared into each clause
 * and each pattern; guards are attached when present, and patterns record whether
 * a guard exists.
 */
@Override
public BLangNode transform(MatchStatementNode matchStatementNode) {
    BLangMatchStatement matchStatement = (BLangMatchStatement) TreeBuilder.createMatchStatementNode();
    BLangExpression matchStmtExpr = createExpression(matchStatementNode.condition());
    matchStatement.setExpression(matchStmtExpr);

    for (MatchClauseNode matchClauseNode : matchStatementNode.matchClauses()) {
        BLangMatchClause bLangMatchClause = (BLangMatchClause) TreeBuilder.createMatchClause();
        bLangMatchClause.pos = getPosition(matchClauseNode);
        bLangMatchClause.expr = matchStmtExpr;
        boolean matchGuardAvailable = false;
        if (matchClauseNode.matchGuard().isPresent()) {
            matchGuardAvailable = true;
            BLangMatchGuard bLangMatchGuard = (BLangMatchGuard) TreeBuilder.createMatchGuard();
            bLangMatchGuard.expr = createExpression(matchClauseNode.matchGuard().get().expression());
            bLangMatchGuard.pos = getPosition(matchClauseNode.matchGuard().get());
            bLangMatchClause.setMatchGuard(bLangMatchGuard);
        }
        for (Node matchPattern : matchClauseNode.matchPatterns()) {
            BLangMatchPattern bLangMatchPattern = transformMatchPattern(matchPattern);
            // transformMatchPattern may return null for unsupported patterns; skip those.
            if (bLangMatchPattern != null) {
                bLangMatchPattern.matchExpr = matchStmtExpr;
                bLangMatchPattern.matchGuardIsAvailable = matchGuardAvailable;
                bLangMatchClause.addMatchPattern(bLangMatchPattern);
            }
        }
        bLangMatchClause.setBlockStatement((BLangBlockStmt) transform(matchClauseNode.blockStatement()));
        matchStatement.addMatchClause(bLangMatchClause);
    }

    matchStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        matchStatement.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    matchStatement.pos = getPosition(matchStatementNode);
    return matchStatement;
}

/**
 * Builds an XML sequence literal from a template expression, merging adjacent
 * XML_TEXT / INTERPOLATION children into a single text literal item.
 */
private BLangXMLSequenceLiteral createXmlSequence(TemplateExpressionNode expressionNode) {
    BLangXMLSequenceLiteral xmlSequenceLiteral =
            (BLangXMLSequenceLiteral) TreeBuilder.createXMLSequenceLiteralNode();
    xmlSequenceLiteral.pos = getPosition(expressionNode);

    Node lastNode = null;
    List<Node> adjacentTextNodes = new ArrayList<>();
    int xmlContentSize =
            expressionNode.content().size();
    // Stateful scan: text/interpolation children are buffered in adjacentTextNodes
    // and flushed as one combined text literal (or a singleton) when a non-text
    // child — or the end of the content — is reached. Order of operations matters.
    for (int index = 0; index < xmlContentSize; index++) {
        Node childItem = expressionNode.content().get(index);
        if (childItem.kind() == SyntaxKind.XML_TEXT || childItem.kind() == SyntaxKind.INTERPOLATION) {
            adjacentTextNodes.add(childItem);
            lastNode = childItem;
            // Keep buffering unless this is the final child (which forces a flush).
            if (index != xmlContentSize - 1) {
                continue;
            }
        }
        if (lastNode != null && (lastNode.kind() == SyntaxKind.XML_TEXT
                || lastNode.kind() == SyntaxKind.INTERPOLATION)) {
            // Flush the buffered text run.
            if (adjacentTextNodes.size() > 1) {
                xmlSequenceLiteral.xmlItems.add((BLangExpression) createXMLTextLiteral(adjacentTextNodes));
            } else {
                xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(lastNode));
            }
            adjacentTextNodes.clear();
            // If the current child was itself part of the flushed run (same kind as
            // the last buffered node), it has already been emitted.
            if (lastNode.kind() == childItem.kind()) {
                continue;
            }
        }
        xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(childItem));
        lastNode = childItem;
    }
    return xmlSequenceLiteral;
}

/**
 * Converts a single XML child node: comments, PIs and elements go through the
 * normal expression path; anything else is treated as an XML text literal.
 */
public BLangExpression createXmlSingletonItem(Node xmlTypeNode) {
    switch (xmlTypeNode.kind()) {
        case XML_COMMENT:
        case XML_PI:
        case XML_ELEMENT:
        case XML_EMPTY_ELEMENT:
            return createExpression(xmlTypeNode);
        default:
            return (BLangExpression) createXMLTextLiteral(xmlTypeNode);
    }
}

/**
 * Entry point for `xml` template literals: empty content → empty literal,
 * single child → singleton item, otherwise a sequence literal.
 */
public BLangNode createXmlTemplateLiteral(TemplateExpressionNode expressionNode) {
    if (expressionNode.content().isEmpty()) {
        return createXMLEmptyLiteral(expressionNode);
    }

    if (expressionNode.content().size() == 1) {
        return createXmlSingletonItem(expressionNode.content().get(0));
    }

    return createXmlSequence(expressionNode);
}

/**
 * Dispatches a match-pattern syntax node to the matching transform. A bare `_`
 * (as a name reference or identifier token) becomes a wildcard pattern; any kind
 * not handled explicitly is expected to be a constant pattern (see the assert).
 */
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
    Location matchPatternPos = matchPattern.location();
    SyntaxKind kind = matchPattern.kind();

    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE
            && ((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
        // Wildcard match pattern written as a simple name reference.
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }

    if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
        // Wildcard match pattern written as a bare identifier token.
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }

    if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
        BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
                (BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
        bLangVarBindingPattern.pos = matchPatternPos;
        bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
        return bLangVarBindingPattern;
    }

    if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
        return transformErrorMatchPattern((ErrorMatchPatternNode) matchPattern, matchPatternPos);
    }

    if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
        return transformNamedArgMatchPattern((NamedArgMatchPatternNode) matchPattern, matchPatternPos);
    }

    if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
        return transformListMatchPattern((ListMatchPatternNode) matchPattern, matchPatternPos);
    }

    if (kind == SyntaxKind.REST_MATCH_PATTERN) {
        return transformRestMatchPattern((RestMatchPatternNode) matchPattern, matchPatternPos);
    }

    if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
        return transformMappingMatchPattern((MappingMatchPatternNode) matchPattern, matchPatternPos);
    }

    if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
        return transformFieldMatchPattern((FieldMatchPatternNode) matchPattern, matchPatternPos);
    }

    // Anything else must be a constant-expression pattern kind.
    assert (kind == SyntaxKind.NUMERIC_LITERAL
            || kind == SyntaxKind.STRING_LITERAL
            || kind == SyntaxKind.SIMPLE_NAME_REFERENCE
            || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE
            || kind == SyntaxKind.IDENTIFIER_TOKEN
            || kind == SyntaxKind.NULL_LITERAL
            || kind == SyntaxKind.NIL_LITERAL
            || kind == SyntaxKind.BOOLEAN_LITERAL
            || kind == SyntaxKind.UNARY_EXPRESSION);

    BLangConstPattern bLangConstMatchPattern = (BLangConstPattern) TreeBuilder.createConstMatchPattern();
    bLangConstMatchPattern.setExpression(createExpression(matchPattern));
    bLangConstMatchPattern.pos = matchPatternPos;
    return bLangConstMatchPattern;
}

/**
 * Transforms an error match pattern. Positional arguments are interpreted in
 * order: message pattern, then cause pattern; the remaining arguments (named-arg
 * or rest patterns) are handled as error field patterns starting at the given index.
 */
private BLangErrorMatchPattern transformErrorMatchPattern(ErrorMatchPatternNode errorMatchPatternNode,
                                                          Location pos) {
    BLangErrorMatchPattern bLangErrorMatchPattern =
            (BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
    bLangErrorMatchPattern.pos = pos;

    NameReferenceNode nameReferenceNode;
    if (errorMatchPatternNode.typeReference().isPresent()) {
        nameReferenceNode = errorMatchPatternNode.typeReference().get();
        bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
    }

    if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
        return bLangErrorMatchPattern;
    }

    Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
    if (isErrorFieldMatchPattern(node)) {
        // No message/cause patterns — everything from index 0 is a field pattern.
        createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
    if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
        return bLangErrorMatchPattern;
    }

    node = errorMatchPatternNode.argListMatchPatternNode().get(1);
    if (isErrorFieldMatchPattern(node)) {
        // Message present; field patterns start at index 1.
        createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
    createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);

    return bLangErrorMatchPattern;
}

/** Transforms a named-argument match pattern (`name = pattern`). */
private BLangNamedArgMatchPattern transformNamedArgMatchPattern(NamedArgMatchPatternNode namedArgMatchPatternNode,
                                                                Location pos) {
    BLangNamedArgMatchPattern bLangNamedArgMatchPattern =
            (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
    bLangNamedArgMatchPattern.argName =
            createIdentifier(namedArgMatchPatternNode.identifier());
    bLangNamedArgMatchPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
    bLangNamedArgMatchPattern.pos = pos;

    return bLangNamedArgMatchPattern;
}

/**
 * Transforms a list match pattern. All members but the last are ordinary member
 * patterns; a trailing rest pattern becomes the list's rest match pattern.
 */
private BLangListMatchPattern transformListMatchPattern(ListMatchPatternNode listMatchPatternNode,
                                                        Location pos) {
    BLangListMatchPattern bLangListMatchPattern =
            (BLangListMatchPattern) TreeBuilder.createListMatchPattern();
    bLangListMatchPattern.pos = pos;

    SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
    int matchPatternListSize = matchPatterns.size();

    if (matchPatternListSize == 0) {
        return bLangListMatchPattern;
    }

    for (int i = 0; i < matchPatternListSize - 1; i++) {
        BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
        // transformMatchPattern may return null for unsupported members; skip those.
        if (bLangMemberMatchPattern == null) {
            continue;
        }
        bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
    }

    BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangListMatchPattern.addMatchPattern(lastMember);
    }

    return bLangListMatchPattern;
}

/** Transforms a rest match pattern (`...x`) into a named rest pattern. */
private BLangRestMatchPattern transformRestMatchPattern(RestMatchPatternNode restMatchPatternNode, Location pos) {
    BLangRestMatchPattern bLangRestMatchPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
    bLangRestMatchPattern.pos = pos;

    SimpleNameReferenceNode variableName = restMatchPatternNode.variableName();
    bLangRestMatchPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name()));
    return bLangRestMatchPattern;
}

/**
 * Transforms a mapping match pattern. All fields but the last are field patterns;
 * a trailing rest pattern becomes the mapping's rest match pattern.
 */
private BLangMappingMatchPattern transformMappingMatchPattern(MappingMatchPatternNode mappingMatchPatternNode,
                                                              Location pos) {
    BLangMappingMatchPattern bLangMappingMatchPattern =
            (BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
    bLangMappingMatchPattern.pos = pos;

    SeparatedNodeList<Node> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns();
    int fieldMatchPatternListSize = fieldMatchPatterns.size();

    if (fieldMatchPatternListSize == 0) {
        return bLangMappingMatchPattern;
    }

    for (int i = 0; i < fieldMatchPatternListSize - 1; i++) {
        bLangMappingMatchPattern.fieldMatchPatterns.add(
                (BLangFieldMatchPattern) transformMatchPattern(fieldMatchPatterns.get(i)));
    }

    BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1));
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangMappingMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangMappingMatchPattern.addFieldMatchPattern((BLangFieldMatchPattern) lastMember);
    }

    return bLangMappingMatchPattern;
}

/** Transforms a single field match pattern (`name: pattern`). */
private BLangFieldMatchPattern transformFieldMatchPattern(FieldMatchPatternNode fieldMatchPatternNode,
                                                          Location pos) {
    BLangFieldMatchPattern bLangFieldMatchPattern =
            (BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
    bLangFieldMatchPattern.pos = pos;

    bLangFieldMatchPattern.fieldName = createIdentifier(fieldMatchPatternNode.fieldNameNode());
    bLangFieldMatchPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
    return bLangFieldMatchPattern;
}

/**
 * Dispatches a binding-pattern syntax node to the matching transform. The default
 * case is asserted to be the wildcard pattern.
 */
private BLangBindingPattern transformBindingPattern(Node bindingPattern) {
    Location pos = getPosition(bindingPattern);
    SyntaxKind patternKind = bindingPattern.kind();
    switch (patternKind) {
        case CAPTURE_BINDING_PATTERN:
            return transformCaptureBindingPattern((CaptureBindingPatternNode) bindingPattern, pos);
        case LIST_BINDING_PATTERN:
            return transformListBindingPattern((ListBindingPatternNode) bindingPattern, pos);
        case NAMED_ARG_BINDING_PATTERN:
            return transformNamedArgBindingPattern((NamedArgBindingPatternNode) bindingPattern, pos);
        case REST_BINDING_PATTERN:
            return transformRestBindingPattern((RestBindingPatternNode) bindingPattern, pos);
        case MAPPING_BINDING_PATTERN:
            return transformMappingBindingPattern((MappingBindingPatternNode) bindingPattern, pos);
        case FIELD_BINDING_PATTERN:
            return transformFieldBindingPattern(bindingPattern, pos);
        case ERROR_BINDING_PATTERN:
            return transformErrorBindingPattern((ErrorBindingPatternNode) bindingPattern, pos);
        case WILDCARD_BINDING_PATTERN:
        default:
            assert patternKind == SyntaxKind.WILDCARD_BINDING_PATTERN;
            return transformWildCardBindingPattern(pos);
    }
}

/** Builds a wildcard binding pattern (`_`). */
private BLangWildCardBindingPattern transformWildCardBindingPattern(Location pos) {
    BLangWildCardBindingPattern bLangWildCardBindingPattern =
            (BLangWildCardBindingPattern) TreeBuilder.createWildCardBindingPattern();
    bLangWildCardBindingPattern.pos = pos;
    return bLangWildCardBindingPattern;
}

/** Transforms a capture binding pattern (a plain variable name). */
private BLangCaptureBindingPattern transformCaptureBindingPattern(CaptureBindingPatternNode captureBindingPattern,
                                                                  Location pos) {
    BLangCaptureBindingPattern bLangCaptureBindingPattern =
            (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
    bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPattern.variableName()));
    bLangCaptureBindingPattern.pos = pos;
    return bLangCaptureBindingPattern;
}

/** Transforms a rest binding pattern (`...x`). */
private BLangRestBindingPattern transformRestBindingPattern(RestBindingPatternNode restBindingPatternNode,
                                                            Location pos) {
    BLangRestBindingPattern bLangRestBindingPattern =
            (BLangRestBindingPattern) TreeBuilder.createRestBindingPattern();
    bLangRestBindingPattern.pos = pos;
    SimpleNameReferenceNode variableName = restBindingPatternNode.variableName();
    bLangRestBindingPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name()));
    return bLangRestBindingPattern;
}

/**
 * Transforms a list binding pattern; a rest pattern member becomes the list's
 * rest binding pattern instead of an ordinary member.
 */
private BLangListBindingPattern transformListBindingPattern(ListBindingPatternNode listBindingPatternNode,
                                                            Location pos) {
    BLangListBindingPattern bLangListBindingPattern =
            (BLangListBindingPattern) TreeBuilder.createListBindingPattern();
    bLangListBindingPattern.pos = pos;

    for (Node listMemberBindingPattern : listBindingPatternNode.bindingPatterns()) {
if (listMemberBindingPattern.kind() != SyntaxKind.REST_BINDING_PATTERN) { bLangListBindingPattern.addBindingPattern(transformBindingPattern(listMemberBindingPattern)); continue; } bLangListBindingPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(listMemberBindingPattern); } return bLangListBindingPattern; } private BLangMappingBindingPattern transformMappingBindingPattern(MappingBindingPatternNode mappingBindingPatternNode, Location pos) { BLangMappingBindingPattern bLangMappingBindingPattern = (BLangMappingBindingPattern) TreeBuilder.createMappingBindingPattern(); bLangMappingBindingPattern.pos = pos; for (Node fieldBindingPattern : mappingBindingPatternNode.fieldBindingPatterns()) { if (fieldBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) { bLangMappingBindingPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(fieldBindingPattern); continue; } bLangMappingBindingPattern.fieldBindingPatterns.add( (BLangFieldBindingPattern) transformBindingPattern(fieldBindingPattern)); } return bLangMappingBindingPattern; } private BLangFieldBindingPattern transformFieldBindingPattern(Node bindingPattern, Location pos) { BLangFieldBindingPattern bLangFieldBindingPattern = (BLangFieldBindingPattern) TreeBuilder.createFieldBindingPattern(); bLangFieldBindingPattern.pos = pos; if (bindingPattern instanceof FieldBindingPatternVarnameNode) { FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode = (FieldBindingPatternVarnameNode) bindingPattern; BLangIdentifier fieldName = createIdentifier(fieldBindingPatternVarnameNode.variableName().name()); bLangFieldBindingPattern.fieldName = fieldName; BLangCaptureBindingPattern bLangCaptureBindingPatternInFieldBindingPattern = (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern(); bLangCaptureBindingPatternInFieldBindingPattern.setIdentifier(fieldName); bLangCaptureBindingPatternInFieldBindingPattern.pos = pos; bLangFieldBindingPattern.bindingPattern = 
bLangCaptureBindingPatternInFieldBindingPattern; return bLangFieldBindingPattern; } FieldBindingPatternFullNode fieldBindingPatternNode = (FieldBindingPatternFullNode) bindingPattern; bLangFieldBindingPattern.fieldName = createIdentifier(fieldBindingPatternNode.variableName().name()); bLangFieldBindingPattern.bindingPattern = transformBindingPattern(fieldBindingPatternNode.bindingPattern()); return bLangFieldBindingPattern; } private BLangNamedArgBindingPattern transformNamedArgBindingPattern(NamedArgBindingPatternNode namedArgBindingPattern, Location pos) { BLangNamedArgBindingPattern bLangNamedArgBindingPattern = (BLangNamedArgBindingPattern) TreeBuilder.createNamedArgBindingPattern(); bLangNamedArgBindingPattern.pos = pos; bLangNamedArgBindingPattern.argName = createIdentifier(namedArgBindingPattern.argName()); bLangNamedArgBindingPattern.bindingPattern = transformBindingPattern(namedArgBindingPattern.bindingPattern()); return bLangNamedArgBindingPattern; } private BLangErrorBindingPattern transformErrorBindingPattern(ErrorBindingPatternNode errorBindingPatternNode, Location pos) { BLangErrorBindingPattern bLangErrorBindingPattern = (BLangErrorBindingPattern) TreeBuilder.createErrorBindingPattern(); bLangErrorBindingPattern.pos = pos; if (errorBindingPatternNode.typeReference().isPresent()) { Node nameReferenceNode = errorBindingPatternNode.typeReference().get(); bLangErrorBindingPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode); } if (errorBindingPatternNode.argListBindingPatterns().size() == 0) { return bLangErrorBindingPattern; } Node node = errorBindingPatternNode.argListBindingPatterns().get(0); if (isErrorFieldBindingPattern(node)) { createErrorFieldBindingPatterns(0, errorBindingPatternNode, bLangErrorBindingPattern); return bLangErrorBindingPattern; } bLangErrorBindingPattern.errorMessageBindingPattern = createErrorMessageBindingPattern(node); if (errorBindingPatternNode.argListBindingPatterns().size() == 1) { return 
bLangErrorBindingPattern;
        }
        // Second arg: either field patterns or the error cause; remaining args are field patterns.
        node = errorBindingPatternNode.argListBindingPatterns().get(1);
        if (isErrorFieldBindingPattern(node)) {
            createErrorFieldBindingPatterns(1, errorBindingPatternNode, bLangErrorBindingPattern);
            return bLangErrorBindingPattern;
        }
        bLangErrorBindingPattern.errorCauseBindingPattern = createErrorCauseBindingPattern(node);
        createErrorFieldBindingPatterns(2, errorBindingPatternNode, bLangErrorBindingPattern);
        return bLangErrorBindingPattern;
    }

    // True if the node is a named-arg or rest match pattern (an error "field" pattern).
    private boolean isErrorFieldMatchPattern(Node node) {
        return node.kind() == SyntaxKind.NAMED_ARG_MATCH_PATTERN || node.kind() == SyntaxKind.REST_MATCH_PATTERN;
    }

    // True if the node is a named-arg or rest binding pattern (an error "field" pattern).
    private boolean isErrorFieldBindingPattern(Node node) {
        return node.kind() == SyntaxKind.NAMED_ARG_BINDING_PATTERN || node.kind() == SyntaxKind.REST_BINDING_PATTERN;
    }

    // Wraps a transformed match pattern as the error-message match pattern.
    private BLangErrorMessageMatchPattern createErrorMessageMatchPattern(Node node) {
        BLangMatchPattern matchPattern = transformMatchPattern(node);
        BLangErrorMessageMatchPattern bLangErrorMessageMatchPattern =
                (BLangErrorMessageMatchPattern) TreeBuilder.createErrorMessageMatchPattern();
        bLangErrorMessageMatchPattern.pos = getPosition(node);
        bLangErrorMessageMatchPattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern);
        return bLangErrorMessageMatchPattern;
    }

    // Wraps a transformed binding pattern as the error-message binding pattern.
    private BLangErrorMessageBindingPattern createErrorMessageBindingPattern(Node node) {
        BLangBindingPattern bindingPattern = transformBindingPattern(node);
        BLangErrorMessageBindingPattern bLangErrorMessageBindingPattern =
                (BLangErrorMessageBindingPattern) TreeBuilder.createErrorMessageBindingPattern();
        bLangErrorMessageBindingPattern.pos = getPosition(node);
        bLangErrorMessageBindingPattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern);
        return bLangErrorMessageBindingPattern;
    }

    // Wraps a transformed pattern as the error-cause match pattern; a nested error pattern is kept as-is.
    private BLangErrorCauseMatchPattern createErrorCauseMatchPattern(Node node) {
        BLangMatchPattern matchPattern = transformMatchPattern(node);
        BLangErrorCauseMatchPattern bLangErrorCauseMatchPattern =
                (BLangErrorCauseMatchPattern) TreeBuilder.createErrorCauseMatchPattern();
        bLangErrorCauseMatchPattern.pos = getPosition(node);
        if (matchPattern.getKind() == NodeKind.ERROR_MATCH_PATTERN) {
            bLangErrorCauseMatchPattern.errorMatchPattern = (BLangErrorMatchPattern) matchPattern;
            return bLangErrorCauseMatchPattern;
        }
        bLangErrorCauseMatchPattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern);
        return bLangErrorCauseMatchPattern;
    }

    // Wraps a transformed pattern as the error-cause binding pattern; a nested error pattern is kept as-is.
    private BLangErrorCauseBindingPattern createErrorCauseBindingPattern(Node node) {
        BLangBindingPattern bindingPattern = transformBindingPattern(node);
        BLangErrorCauseBindingPattern bLangErrorCauseBindingPattern =
                (BLangErrorCauseBindingPattern) TreeBuilder.createErrorCauseBindingPattern();
        bLangErrorCauseBindingPattern.pos = getPosition(node);
        if (bindingPattern.getKind() == NodeKind.ERROR_BINDING_PATTERN) {
            bLangErrorCauseBindingPattern.errorBindingPattern = (BLangErrorBindingPattern) bindingPattern;
            return bLangErrorCauseBindingPattern;
        }
        bLangErrorCauseBindingPattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern);
        return bLangErrorCauseBindingPattern;
    }

    // Adds one transformed error-field (named-arg or rest) match pattern into the accumulator node.
    private BLangErrorFieldMatchPatterns createErrorFieldMatchPattern(
            Node errorFieldMatchPatternNode, BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns) {
        BLangMatchPattern matchPattern = transformMatchPattern(errorFieldMatchPatternNode);
        bLangErrorFieldMatchPatterns.pos = getPosition(errorFieldMatchPatternNode);
        if (matchPattern.getKind() == NodeKind.NAMED_ARG_MATCH_PATTERN) {
            bLangErrorFieldMatchPatterns.addNamedArgMatchPattern(
                    (org.ballerinalang.model.tree.matchpatterns.NamedArgMatchPatternNode) matchPattern);
        } else if (matchPattern.getKind() == NodeKind.REST_MATCH_PATTERN) {
            bLangErrorFieldMatchPatterns.restMatchPattern = (BLangRestMatchPattern) matchPattern;
        }
        return bLangErrorFieldMatchPatterns;
    }

    // Adds one transformed error-field (named-arg or rest) binding pattern into the accumulator node.
    private BLangErrorFieldBindingPatterns createErrorFieldBindingPattern(
            Node errorFieldBindingPatternNode, BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns) {
        BLangBindingPattern bindingPattern = transformBindingPattern(errorFieldBindingPatternNode);
        bLangErrorFieldBindingPatterns.pos = getPosition(errorFieldBindingPatternNode);
        if (bindingPattern.getKind() == NodeKind.NAMED_ARG_BINDING_PATTERN) {
            bLangErrorFieldBindingPatterns.addNamedArgBindingPattern(
                    (org.ballerinalang.model.tree.bindingpattern.NamedArgBindingPatternNode) bindingPattern);
        } else if (bindingPattern.getKind() == NodeKind.REST_BINDING_PATTERN) {
            bLangErrorFieldBindingPatterns.restBindingPattern = (BLangRestBindingPattern) bindingPattern;
        }
        return bLangErrorFieldBindingPatterns;
    }

    // Transforms all error field match patterns from 'index' onwards into a single container node.
    private void createErrorFieldMatchPatterns(int index, ErrorMatchPatternNode errorMatchPatternNode,
                                               BLangErrorMatchPattern bLangErrorMatchPattern) {
        BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns =
                (BLangErrorFieldMatchPatterns) TreeBuilder.createErrorFieldMatchPattern();
        for (int i = index; i < errorMatchPatternNode.argListMatchPatternNode().size(); i++) {
            Node errorFieldMatchPatternNode = errorMatchPatternNode.argListMatchPatternNode().get(i);
            // Same accumulator is reused; assignment is idempotent after the first iteration.
            bLangErrorMatchPattern.errorFieldMatchPatterns =
                    createErrorFieldMatchPattern(errorFieldMatchPatternNode, bLangErrorFieldMatchPatterns);
        }
    }

    // Transforms all error field binding patterns from 'index' onwards into a single container node.
    private void createErrorFieldBindingPatterns(int index, ErrorBindingPatternNode errorBindingPatternNode,
                                                 BLangErrorBindingPattern bLangErrorBindingPattern) {
        BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns =
                (BLangErrorFieldBindingPatterns) TreeBuilder.createErrorFieldBindingPattern();
        for (int i = index; i < errorBindingPatternNode.argListBindingPatterns().size(); i++) {
            Node errorFieldBindingPatternNode = errorBindingPatternNode.argListBindingPatterns().get(i);
            bLangErrorBindingPattern.errorFieldBindingPatterns =
                    createErrorFieldBindingPattern(errorFieldBindingPatternNode, bLangErrorFieldBindingPatterns);
        }
    }

    // Classifies a transformed match pattern into the simple-match-pattern holder (wildcard/const/var).
    private BLangSimpleMatchPattern createSimpleMatchPattern(BLangNode bLangNode) {
        BLangSimpleMatchPattern bLangSimpleMatchPattern = (BLangSimpleMatchPattern)
TreeBuilder.createSimpleMatchPattern();
        NodeKind kind = bLangNode.getKind();
        switch (kind) {
            case WILDCARD_MATCH_PATTERN:
                bLangSimpleMatchPattern.wildCardMatchPattern = (BLangWildCardMatchPattern) bLangNode;
                break;
            case CONST_MATCH_PATTERN:
                bLangSimpleMatchPattern.constPattern = (BLangConstPattern) bLangNode;
                break;
            case VAR_BINDING_PATTERN_MATCH_PATTERN:
                bLangSimpleMatchPattern.varVariableName = (BLangVarBindingPatternMatchPattern) bLangNode;
                break;
        }
        return bLangSimpleMatchPattern;
    }

    // Builds a capture binding pattern directly from its syntax node (uses the node's own position).
    private BLangCaptureBindingPattern createCaptureBindingPattern(
            CaptureBindingPatternNode captureBindingPatternNode) {
        BLangCaptureBindingPattern bLangCaptureBindingPattern =
                (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
        bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPatternNode.variableName()));
        bLangCaptureBindingPattern.pos = getPosition(captureBindingPatternNode);
        return bLangCaptureBindingPattern;
    }

    // Classifies a transformed binding pattern into the simple-binding-pattern holder (wildcard/capture).
    private BLangSimpleBindingPattern createSimpleBindingPattern(BLangNode bLangNode) {
        BLangSimpleBindingPattern bLangSimpleBindingPattern =
                (BLangSimpleBindingPattern) TreeBuilder.createSimpleBindingPattern();
        NodeKind kind = bLangNode.getKind();
        switch (kind) {
            case WILDCARD_BINDING_PATTERN:
                bLangSimpleBindingPattern.wildCardBindingPattern = (BLangWildCardBindingPattern) bLangNode;
                break;
            case CAPTURE_BINDING_PATTERN:
                bLangSimpleBindingPattern.captureBindingPattern = (BLangCaptureBindingPattern) bLangNode;
                break;
        }
        return bLangSimpleBindingPattern;
    }

    // Builds an XML element filter (ns:elementName) from a name reference or XML name pattern node.
    private BLangXMLElementFilter createXMLElementFilter(Node node) {
        String ns = "";
        String elementName = "*";
        Location nsPos = null;
        Location elemNamePos = null;
        SyntaxKind kind = node.kind();
        switch (kind) {
            case SIMPLE_NAME_REFERENCE:
                SimpleNameReferenceNode simpleNameReferenceNode = (SimpleNameReferenceNode) node;
                elementName = simpleNameReferenceNode.name().text();
                elemNamePos = getPosition(simpleNameReferenceNode);
                break;
            case QUALIFIED_NAME_REFERENCE:
                QualifiedNameReferenceNode qualifiedNameReferenceNode = (QualifiedNameReferenceNode) node;
                elementName = qualifiedNameReferenceNode.identifier().text();
                elemNamePos = getPosition(qualifiedNameReferenceNode.identifier());
                ns = qualifiedNameReferenceNode.modulePrefix().text();
                nsPos = getPosition(qualifiedNameReferenceNode.modulePrefix());
                break;
            case XML_ATOMIC_NAME_PATTERN:
                XMLAtomicNamePatternNode atomicNamePatternNode = (XMLAtomicNamePatternNode) node;
                elementName = atomicNamePatternNode.name().text();
                elemNamePos = getPosition(atomicNamePatternNode.name());
                ns = atomicNamePatternNode.prefix().text();
                nsPos = getPosition(atomicNamePatternNode.prefix());
                break;
            case ASTERISK_TOKEN:
                elemNamePos = getPosition(node);
        }
        // Quoted identifiers ('name) drop the leading quote in the filter.
        if (stringStartsWithSingleQuote(ns)) {
            ns = ns.substring(1);
        }
        if (stringStartsWithSingleQuote(elementName)) {
            elementName = elementName.substring(1);
        }
        return new BLangXMLElementFilter(getPosition(node), null, ns, nsPos, elementName, elemNamePos);
    }

    // True if the string begins with a single quote (quoted-identifier syntax).
    private boolean stringStartsWithSingleQuote(String ns) {
        return ns != null && ns.length() > 0 && ns.charAt(0) == '\'';
    }

    // Reconstructs the source text of a byte-array literal, e.g. "base16 `...`".
    private String getValueFromByteArrayNode(ByteArrayLiteralNode byteArrayLiteralNode) {
        StringBuilder value = new StringBuilder();
        value.append(byteArrayLiteralNode.type().text());
        value.append(" ");
        value.append("`");
        if (byteArrayLiteralNode.content().isPresent()) {
            value.append(byteArrayLiteralNode.content().get().text());
        }
        value.append("`");
        return value.toString();
    }

    // Desugars a mapping binding pattern into a record variable with key/value entries and an optional rest param.
    private BLangRecordVariable createBLangRecordVariable(MappingBindingPatternNode mappingBindingPatternNode) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
        List<BLangRecordVariableKeyValue> fieldBindingPatternsList = new ArrayList<>();
        for (BindingPatternNode node : mappingBindingPatternNode.fieldBindingPatterns()) {
            BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
            if (node instanceof FieldBindingPatternFullNode) {
                // Full form {x: <pattern>}.
                FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) node;
                recordKeyValue.key = createIdentifier(fullNode.variableName().name());
                recordKeyValue.valueBindingPattern = getBLangVariableNode(fullNode.bindingPattern());
            } else if (node instanceof FieldBindingPatternVarnameNode) {
                // Shorthand {x}: value is a simple variable named after the field.
                FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) node;
                recordKeyValue.key = createIdentifier(varnameNode.variableName().name());
                BLangSimpleVariable value = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
                value.pos = getPosition(varnameNode);
                IdentifierNode name = createIdentifier(varnameNode.variableName().name());
                ((BLangIdentifier) name).pos = value.pos;
                value.setName(name);
                recordKeyValue.valueBindingPattern = value;
            } else {
                // Rest binding pattern ends the field list.
                recordVariable.restParam = getBLangVariableNode(node);
                break;
            }
            fieldBindingPatternsList.add(recordKeyValue);
        }
        recordVariable.variableList = fieldBindingPatternsList;
        recordVariable.pos = getPosition(mappingBindingPatternNode);
        return recordVariable;
    }

    // Creates an empty string literal expression with no position set.
    private BLangLiteral createEmptyLiteral() {
        BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        bLiteral.value = "";
        bLiteral.originalValue = "";
        bLiteral.setBType(symTable.getTypeFromTag(TypeTags.STRING));
        return bLiteral;
    }

    // Creates a simple variable node named after the given identifier token.
    private BLangVariable createSimpleVariable(Location location, Token identifier, Location identifierPos) {
        BLangSimpleVariable memberVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        memberVar.pos = location;
        IdentifierNode name = createIdentifier(identifierPos, identifier);
        ((BLangIdentifier) name).pos = identifierPos;
        memberVar.setName(name);
        return memberVar;
    }

    // Converts any binding pattern into the corresponding BLang variable node
    // (record, tuple, error or simple variable).
    private BLangVariable getBLangVariableNode(BindingPatternNode bindingPattern) {
        Token varName;
        switch (bindingPattern.kind()) {
            case MAPPING_BINDING_PATTERN:
                MappingBindingPatternNode mappingBindingPatternNode = (MappingBindingPatternNode) bindingPattern;
                return createBLangRecordVariable(mappingBindingPatternNode);
            case LIST_BINDING_PATTERN:
                ListBindingPatternNode
listBindingPatternNode = (ListBindingPatternNode) bindingPattern;
                BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
                tupleVariable.pos = getPosition(listBindingPatternNode);
                for (BindingPatternNode memberBindingPattern : listBindingPatternNode.bindingPatterns()) {
                    if (memberBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
                        tupleVariable.restVariable = getBLangVariableNode(memberBindingPattern);
                    } else {
                        BLangVariable member = getBLangVariableNode(memberBindingPattern);
                        tupleVariable.memberVariables.add(member);
                    }
                }
                return tupleVariable;
            case ERROR_BINDING_PATTERN:
                ErrorBindingPatternNode errorBindingPatternNode = (ErrorBindingPatternNode) bindingPattern;
                BLangErrorVariable bLangErrorVariable = (BLangErrorVariable) TreeBuilder.createErrorVariableNode();
                bLangErrorVariable.pos = getPosition(errorBindingPatternNode);
                Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
                if (errorTypeRef.isPresent()) {
                    bLangErrorVariable.typeNode = createTypeNode(errorTypeRef.get());
                }
                SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                        errorBindingPatternNode.argListBindingPatterns();
                int numberOfArgs = argListBindingPatterns.size();
                List<BLangErrorVariable.BLangErrorDetailEntry> namedArgs = new ArrayList<>();
                for (int position = 0; position < numberOfArgs; position++) {
                    BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
                    switch (bindingPatternNode.kind()) {
                        case CAPTURE_BINDING_PATTERN:
                        case WILDCARD_BINDING_PATTERN:
                            // Position 0 is the error message.
                            if (position == 0) {
                                bLangErrorVariable.message =
                                        (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                                break;
                            }
                            // NOTE(review): no break here — a capture/wildcard at position > 0 falls through
                            // and is treated as the error cause; looks deliberate, confirm with tests.
                        case ERROR_BINDING_PATTERN:
                            bLangErrorVariable.cause = getBLangVariableNode(bindingPatternNode);
                            break;
                        case NAMED_ARG_BINDING_PATTERN:
                            NamedArgBindingPatternNode namedArgBindingPatternNode =
                                    (NamedArgBindingPatternNode) bindingPatternNode;
                            BLangIdentifier key = createIdentifier(namedArgBindingPatternNode.argName());
                            BLangVariable valueBindingPattern =
                                    getBLangVariableNode(namedArgBindingPatternNode.bindingPattern());
                            BLangErrorVariable.BLangErrorDetailEntry detailEntry =
                                    new BLangErrorVariable.BLangErrorDetailEntry(key, valueBindingPattern);
                            namedArgs.add(detailEntry);
                            break;
                        default:
                            // Remaining kind is the rest binding pattern for the error detail.
                            bLangErrorVariable.restDetail =
                                    (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                    }
                }
                bLangErrorVariable.detail = namedArgs;
                return bLangErrorVariable;
            case REST_BINDING_PATTERN:
                RestBindingPatternNode restBindingPatternNode = (RestBindingPatternNode) bindingPattern;
                varName = restBindingPatternNode.variableName().name();
                break;
            case WILDCARD_BINDING_PATTERN:
                WildcardBindingPatternNode wildcardBindingPatternNode = (WildcardBindingPatternNode) bindingPattern;
                varName = wildcardBindingPatternNode.underscoreToken();
                break;
            case CAPTURE_BINDING_PATTERN:
            default:
                CaptureBindingPatternNode captureBindingPatternNode = (CaptureBindingPatternNode) bindingPattern;
                varName = captureBindingPatternNode.variableName();
                break;
        }
        // Rest/wildcard/capture all reduce to a simple variable named by the captured token.
        Location pos = getPosition(bindingPattern);
        return createSimpleVariable(pos, varName, getPosition(varName));
    }

    // Creates a value-type node (int, string, ...) of the given kind at the given position.
    BLangValueType addValueType(Location pos, TypeKind typeKind) {
        BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        typeNode.pos = pos;
        typeNode.typeKind = typeKind;
        return typeNode;
    }

    // Transforms a list of statement syntax nodes into BLang statements.
    private List<BLangStatement> generateBLangStatements(NodeList<StatementNode> statementNodes) {
        List<BLangStatement> statements = new ArrayList<>();
        return generateAndAddBLangStatements(statementNodes, statements);
    }

    // Appends transformed statements to the given list; fork statements get special expansion.
    private List<BLangStatement> generateAndAddBLangStatements(NodeList<StatementNode> statementNodes,
                                                               List<BLangStatement> statements) {
        for (StatementNode statement : statementNodes) {
            if (statement != null) {
                if (statement.kind() == SyntaxKind.FORK_STATEMENT) {
                    generateForkStatements(statements, (ForkStatementNode) statement);
                    continue;
                }
                statements.add((BLangStatement) statement.apply(this));
            }
        }
        return statements;
    }

    // Joins version number tokens with '.' into a version string (e.g. "1.2.3").
    private String extractVersion(SeparatedNodeList<Token> versionNumbers) {
        StringBuilder version = new StringBuilder();
        int size = versionNumbers.size();
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                version.append(".");
            }
            version.append(versionNumbers.get(i).text());
        }
        return version.toString();
    }

    // Expands a fork statement: each named worker becomes a FORKED worker variable definition
    // (plus any statements queued during its transformation), followed by the fork-join node itself.
    private void generateForkStatements(List<BLangStatement> statements, ForkStatementNode forkStatementNode) {
        BLangForkJoin forkJoin = (BLangForkJoin) forkStatementNode.apply(this);
        String nextAnonymousForkKey = anonymousModelHelper.getNextAnonymousForkKey(packageID);
        for (NamedWorkerDeclarationNode workerDeclarationNode : forkStatementNode.namedWorkerDeclarations()) {
            BLangSimpleVariableDef workerDef = (BLangSimpleVariableDef) workerDeclarationNode.apply(this);
            workerDef.isWorker = true;
            workerDef.isInFork = true;
            workerDef.var.flagSet.add(Flag.FORKED);
            BLangFunction function = ((BLangLambdaFunction) workerDef.var.expr).function;
            function.addFlag(Flag.FORKED);
            function.anonForkName = nextAnonymousForkKey;
            statements.add(workerDef);
            // Flush statements queued while transforming the worker body.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
            forkJoin.addWorkers(workerDef);
        }
        statements.add(forkJoin);
    }

    // Wraps an expression in a 'check' expression.
    private BLangCheckedExpr createCheckExpr(Location pos, BLangExpression expr) {
        BLangCheckedExpr checkedExpr = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
        checkedExpr.pos = pos;
        checkedExpr.expr = expr;
        return checkedExpr;
    }

    // Wraps an expression in a 'checkpanic' expression.
    private BLangCheckPanickedExpr createCheckPanickedExpr(Location pos, BLangExpression expr) {
        BLangCheckPanickedExpr checkPanickedExpr =
                (BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
        checkPanickedExpr.pos = pos;
        checkPanickedExpr.expr = expr;
        return checkPanickedExpr;
    }

    // Copies parameters and the return type from the signature syntax node onto the BLang function.
    private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
        for (ParameterNode child : funcSignature.parameters()) {
            SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
            if (child instanceof RestParameterNode) {
bLFunction.setRestParameter(param);
            } else {
                bLFunction.addParameter(param);
            }
        }
        Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
        if (retNode.isPresent()) {
            ReturnTypeDescriptorNode returnType = retNode.get();
            bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
            bLFunction.returnTypeAnnAttachments = applyAll(returnType.annotations());
        } else {
            // No return type descriptor: default the return type to nil.
            BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            bLValueType.pos = symTable.builtinPos;
            bLValueType.typeKind = TypeKind.NIL;
            bLFunction.setReturnTypeNode(bLValueType);
        }
    }

    // Creates a unary expression node with the given operator.
    private BLangUnaryExpr createBLangUnaryExpr(Location location, OperatorKind operatorKind, BLangExpression expr) {
        BLangUnaryExpr bLUnaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
        bLUnaryExpr.pos = location;
        bLUnaryExpr.operator = operatorKind;
        bLUnaryExpr.expr = expr;
        return bLUnaryExpr;
    }

    // Transforms a node into an expression; async send actions are rejected with a diagnostic
    // and replaced by a missing-identifier reference so transformation can continue.
    private BLangExpression createExpression(Node expression) {
        if (expression.kind() == SyntaxKind.ASYNC_SEND_ACTION) {
            dlog.error(getPosition(expression), DiagnosticErrorCode.ASYNC_SEND_NOT_YET_SUPPORTED_AS_EXPRESSION);
            Token missingIdentifier = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                    NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
            expression = NodeFactory.createSimpleNameReferenceNode(missingIdentifier);
        }
        return (BLangExpression) createActionOrExpression(expression);
    }

    // Transforms a node into an action or expression, special-casing simple literals,
    // name references, braced (group) expressions and type references.
    private BLangNode createActionOrExpression(Node actionOrExpression) {
        if (isSimpleLiteral(actionOrExpression.kind())) {
            return createSimpleLiteral(actionOrExpression);
        } else if (actionOrExpression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
                actionOrExpression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                actionOrExpression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
            BLangNameReference nameReference = createBLangNameReference(actionOrExpression);
            BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
            bLVarRef.pos = getPosition(actionOrExpression);
            bLVarRef.pkgAlias = this.createIdentifier((Location) nameReference.pkgAlias.getPosition(),
                    nameReference.pkgAlias.getValue());
            bLVarRef.variableName = this.createIdentifier((Location) nameReference.name.getPosition(),
                    nameReference.name.getValue());
            return bLVarRef;
        } else if (actionOrExpression.kind() == SyntaxKind.BRACED_EXPRESSION) {
            BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
            group.expression = (BLangExpression) actionOrExpression.apply(this);
            group.pos = getPosition(actionOrExpression);
            return group;
        } else if (isType(actionOrExpression.kind())) {
            BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
            typeAccessExpr.pos = getPosition(actionOrExpression);
            typeAccessExpr.typeNode = createTypeNode(actionOrExpression);
            return typeAccessExpr;
        } else {
            return actionOrExpression.apply(this);
        }
    }

    // Builds a string template literal; an empty template gets a single empty-string literal.
    private BLangNode createStringTemplateLiteral(NodeList<Node> memberNodes, Location location) {
        BLangStringTemplateLiteral stringTemplateLiteral =
                (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
        for (Node memberNode : memberNodes) {
            stringTemplateLiteral.exprs.add((BLangExpression) memberNode.apply(this));
        }
        if (stringTemplateLiteral.exprs.isEmpty()) {
            BLangLiteral emptyLiteral = createEmptyLiteral();
            emptyLiteral.pos = location;
            stringTemplateLiteral.exprs.add(emptyLiteral);
        }
        stringTemplateLiteral.pos = location;
        return stringTemplateLiteral;
    }

    // Builds a raw template literal, padding with empty strings so that string parts
    // and interpolations always interleave (strings.size() == insertions.size() + 1).
    private BLangRawTemplateLiteral createRawTemplateLiteral(NodeList<Node> members, Location location) {
        BLangRawTemplateLiteral literal = (BLangRawTemplateLiteral) TreeBuilder.createRawTemplateLiteralNode();
        literal.pos = location;
        boolean prevNodeWasInterpolation = false;
        Node firstMember = members.isEmpty() ? null : members.get(0);
        if (firstMember != null && firstMember.kind() == SyntaxKind.INTERPOLATION) {
            literal.strings.add(createStringLiteral("", getPosition(firstMember)));
        }
        for (Node member : members) {
            if (member.kind() == SyntaxKind.INTERPOLATION) {
                literal.insertions.add((BLangExpression) member.apply(this));
                if (prevNodeWasInterpolation) {
                    // Two adjacent interpolations need an empty string between them.
                    literal.strings.add(createStringLiteral("", getPosition(member)));
                }
                prevNodeWasInterpolation = true;
            } else {
                literal.strings.add((BLangLiteral) member.apply(this));
                prevNodeWasInterpolation = false;
            }
        }
        if (prevNodeWasInterpolation) {
            // Template ended with an interpolation; pad the trailing string part.
            literal.strings.add(createStringLiteral("", getPosition(members.get(members.size() - 1))));
        }
        return literal;
    }

    // Creates a simple variable whose name token may be absent.
    private BLangSimpleVariable createSimpleVar(Optional<Token> name, Node type,
                                                NodeList<AnnotationNode> annotations) {
        if (name.isPresent()) {
            Token nameToken = name.get();
            return createSimpleVar(nameToken, type, null, null, annotations);
        }
        return createSimpleVar(null, type, null, null, annotations);
    }

    // Convenience overload without initializer or visibility qualifier.
    private BLangSimpleVariable createSimpleVar(Token name, Node type, NodeList<AnnotationNode> annotations) {
        return createSimpleVar(name, type, null, null, annotations);
    }

    // Creates a simple variable with optional type, initializer, visibility and annotations.
    private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer,
                                                Token visibilityQualifier, NodeList<AnnotationNode> annotations) {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.setName(this.createIdentifier(name));
        bLSimpleVar.name.pos = getPosition(name);
        if (isDeclaredWithVar(typeName)) {
            bLSimpleVar.isDeclaredWithVar = true;
        } else {
            bLSimpleVar.setTypeNode(createTypeNode(typeName));
        }
        if (visibilityQualifier != null) {
            if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
                bLSimpleVar.flagSet.add(Flag.PRIVATE);
            } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
                bLSimpleVar.flagSet.add(Flag.PUBLIC);
            }
        }
        if (initializer != null) {
            bLSimpleVar.setInitialExpression(createExpression(initializer));
        }
        if
(annotations != null) {
            bLSimpleVar.annAttachments = applyAll(annotations);
        }
        return bLSimpleVar;
    }

    // True when the variable is declared with 'var' (or has no type descriptor at all).
    private boolean isDeclaredWithVar(Node typeNode) {
        if (typeNode == null || typeNode.kind() == SyntaxKind.VAR_TYPE_DESC) {
            return true;
        }
        return false;
    }

    // Creates an identifier from a token, using the token's own position.
    private BLangIdentifier createIdentifier(Token token) {
        return createIdentifier(getPosition(token), token);
    }

    // Creates an identifier from a token; missing tokens get a synthesized placeholder name.
    private BLangIdentifier createIdentifier(Location pos, Token token) {
        if (token == null) {
            return createIdentifier(pos, null, null);
        }
        String identifierName = token.text();
        if (token.isMissing() || identifierName.equals(IDENTIFIER_LITERAL_PREFIX)) {
            identifierName = missingNodesHelper.getNextMissingNodeName(packageID);
        }
        return createIdentifier(pos, identifierName);
    }

    // Creates an identifier from a raw string value.
    private BLangIdentifier createIdentifier(Location pos, String value) {
        return createIdentifier(pos, value, null);
    }

    // Creates an identifier; quoted identifiers (prefixed with IDENTIFIER_LITERAL_PREFIX)
    // are unescaped and flagged as literals.
    private BLangIdentifier createIdentifier(Location pos, String value, Set<Whitespace> ws) {
        BLangIdentifier bLIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        if (value == null) {
            return bLIdentifer;
        }
        if (value.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
            bLIdentifer.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value.substring(1)));
            bLIdentifer.originalValue = value;
            bLIdentifer.setLiteral(true);
        } else {
            bLIdentifer.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value));
            bLIdentifer.setLiteral(false);
        }
        bLIdentifer.pos = pos;
        if (ws != null) {
            bLIdentifer.addWS(ws);
        }
        return bLIdentifer;
    }

    // Creates an empty string literal at the given position.
    private BLangLiteral createEmptyStringLiteral(Location pos) {
        BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        bLiteral.pos = pos;
        bLiteral.setBType(symTable.stringType);
        bLiteral.value = "";
        bLiteral.originalValue = "";
        return bLiteral;
    }

    // Creates a simple literal outside a finite-type context.
    private BLangLiteral createSimpleLiteral(Node literal) {
        return createSimpleLiteral(literal, false);
    }

    // Creates a simple literal, folding a leading unary sign into the literal value itself.
    private BLangLiteral createSimpleLiteral(Node literal, boolean isFiniteType) {
        if (literal.kind() == SyntaxKind.UNARY_EXPRESSION) {
            UnaryExpressionNode unaryExpr = (UnaryExpressionNode) literal;
            BLangLiteral bLangLiteral =
                    createSimpleLiteral(unaryExpr.expression(), unaryExpr.unaryOperator().kind(), isFiniteType);
            bLangLiteral.pos = getPosition(unaryExpr);
            return bLangLiteral;
        }
        return createSimpleLiteral(literal, SyntaxKind.NONE, isFiniteType);
    }

    // Creates a literal of any simple kind, applying an optional sign token.
    // Handles numeric (int/byte/float/decimal), string/template/identifier, boolean,
    // nil/null and byte-array literals.
    private BLangLiteral createSimpleLiteral(Node literal, SyntaxKind sign, boolean isFiniteType) {
        BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        SyntaxKind type = literal.kind();
        int typeTag = -1;
        Object value = null;
        String originalValue = null;
        String textValue;
        if (literal instanceof BasicLiteralNode) {
            textValue = ((BasicLiteralNode) literal).literalToken().text();
        } else if (literal instanceof Token) {
            textValue = ((Token) literal).text();
        } else {
            textValue = "";
        }
        // Fold the unary sign into the literal's text so parsing sees the signed value.
        if (sign == SyntaxKind.PLUS_TOKEN) {
            textValue = "+" + textValue;
        } else if (sign == SyntaxKind.MINUS_TOKEN) {
            textValue = "-" + textValue;
        }
        if (type == SyntaxKind.NUMERIC_LITERAL) {
            SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
            if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                typeTag = TypeTags.INT;
                value = getIntegerLiteral(literal, textValue, sign);
                originalValue = textValue;
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
                // Hex integers within byte range are typed as byte.
                if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN && withinByteRange(value)) {
                    typeTag = TypeTags.BYTE;
                }
            } else if (literalTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN) {
                typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
                if (isFiniteType) {
                    // Finite-type members drop the f/d suffix and '+' sign from the stored value.
                    value = textValue.replaceAll("[fd+]", "");
                    originalValue = textValue.replace("+", "");
                } else {
                    value = textValue;
                    originalValue = textValue;
                }
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            } else if (literalTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
                typeTag = TypeTags.FLOAT;
                value = getHexNodeValue(textValue);
                originalValue = textValue;
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            }
        } else if (type == SyntaxKind.BOOLEAN_LITERAL) {
            typeTag = TypeTags.BOOLEAN;
            value = Boolean.parseBoolean(textValue);
            originalValue = textValue;
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
                type == SyntaxKind.TEMPLATE_STRING || type == SyntaxKind.IDENTIFIER_TOKEN) {
            String text = textValue;
            if (type == SyntaxKind.STRING_LITERAL) {
                // Strip surrounding quotes; the closing quote may be missing on incomplete sources.
                if (text.length() > 1 && text.charAt(text.length() - 1) == '"') {
                    text = text.substring(1, text.length() - 1);
                } else {
                    text = text.substring(1);
                }
            }
            String originalText = text;
            Location pos = getPosition(literal);
            // Validate and normalize \u{...} unicode escapes, reporting invalid code points.
            Matcher matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
            int position = 0;
            while (matcher.find(position)) {
                String hexStringVal = matcher.group(1);
                int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
                if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE) ||
                        hexDecimalVal > Constants.MAX_UNICODE) {
                    String hexStringWithBraces = matcher.group(0);
                    int offset = originalText.indexOf(hexStringWithBraces) + 1;
                    dlog.error(new BLangDiagnosticLocation(currentCompUnitName,
                                    pos.lineRange().startLine().line(),
                                    pos.lineRange().endLine().line(),
                                    pos.lineRange().startLine().offset() + offset,
                                    pos.lineRange().startLine().offset() + offset + hexStringWithBraces.length()),
                            DiagnosticErrorCode.INVALID_UNICODE, hexStringWithBraces);
                }
                text = matcher.replaceFirst("\\\\u" +
fillWithZeros(hexStringVal));
                position = matcher.end() - 2;
                matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
            }
            if (type != SyntaxKind.TEMPLATE_STRING && type != SyntaxKind.XML_TEXT_CONTENT) {
                try {
                    text = StringEscapeUtils.unescapeJava(text);
                } catch (Exception e) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_UNICODE, originalText);
                }
            }
            typeTag = TypeTags.STRING;
            value = text;
            originalValue = textValue;
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.NIL_LITERAL) {
            originalValue = "()";
            typeTag = TypeTags.NIL;
            value = "()";
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.NULL_LITERAL) {
            originalValue = "null";
            typeTag = TypeTags.NIL;
            value = "null";
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.BINARY_EXPRESSION) {
            // NOTE(review): BINARY_EXPRESSION here is mapped to a byte array; the isNumericLiteral(type)
            // branch below cannot be true for this kind — confirm intent.
            typeTag = TypeTags.BYTE_ARRAY;
            value = textValue;
            originalValue = textValue;
            if (isNumericLiteral(type)) {
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            } else {
                bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
            }
        } else if (type == SyntaxKind.BYTE_ARRAY_LITERAL) {
            return (BLangLiteral) literal.apply(this);
        }
        bLiteral.pos = getPosition(literal);
        bLiteral.setBType(symTable.getTypeFromTag(typeTag));
        bLiteral.getBType().tag = typeTag;
        bLiteral.value = value;
        bLiteral.originalValue = originalValue;
        return bLiteral;
    }

    // Creates a string literal with the given value and position.
    private BLangLiteral createStringLiteral(String value, Location pos) {
        BLangLiteral strLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        strLiteral.value = strLiteral.originalValue = value;
        strLiteral.setBType(symTable.stringType);
        strLiteral.pos = pos;
        return strLiteral;
    }

    // Converts a type descriptor syntax node into a BLang type node.
    private BLangType createTypeNode(Node type) {
        if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
            return createBuiltInTypeNode(type);
        } else if (type.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE || type.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
            // User-defined type, possibly qualified with a module prefix.
            BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
            BLangNameReference nameReference = createBLangNameReference(type);
            bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
            bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
            bLUserDefinedType.pos = getPosition(type);
            return bLUserDefinedType;
        } else if (type.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            if (type.hasDiagnostics()) {
                // Erroneous name reference: build a user-defined type directly from the (possibly
                // missing) name token instead of recursing into it.
                BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType)
                        TreeBuilder.createUserDefinedTypeNode();
                BLangIdentifier pkgAlias = this.createIdentifier(null, "");
                BLangIdentifier name = this.createIdentifier(((SimpleNameReferenceNode) type).name());
                BLangNameReference nameReference = new BLangNameReference(getPosition(type), null, pkgAlias, name);
                bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
                bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
                bLUserDefinedType.pos = getPosition(type);
                return bLUserDefinedType;
            }
            SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
            return createTypeNode(nameReferenceNode.name());
        }
        return (BLangType) type.apply(this);
    }

    // Creates a built-in type node (value type or built-in reference type) for the given node.
    // Returns null for 'var' (handled by the caller as declared-with-var).
    private BLangType createBuiltInTypeNode(Node type) {
        String typeText;
        if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
            typeText = "()";
        } else if (type instanceof BuiltinSimpleNameReferenceNode) {
            BuiltinSimpleNameReferenceNode simpleNameRef = (BuiltinSimpleNameReferenceNode) type;
            if (simpleNameRef.kind() == SyntaxKind.VAR_TYPE_DESC) {
                return null;
            } else if (simpleNameRef.name().isMissing()) {
                // Missing type name: synthesize a user-defined type with a generated placeholder name.
                String name = missingNodesHelper.getNextMissingNodeName(packageID);
                BLangIdentifier identifier = createIdentifier(getPosition(simpleNameRef.name()), name);
                BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
                return createUserDefinedType(getPosition(type), pkgAlias, identifier);
            }
            typeText = simpleNameRef.name().text();
        } else {
            typeText = ((Token) type).text();
        }
        TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));
        SyntaxKind kind = type.kind();
        switch (kind) {
            case BOOLEAN_TYPE_DESC:
            case INT_TYPE_DESC:
            case BYTE_TYPE_DESC:
            case FLOAT_TYPE_DESC:
            case DECIMAL_TYPE_DESC:
            case STRING_TYPE_DESC:
            case ANY_TYPE_DESC:
            case NIL_TYPE_DESC:
            case HANDLE_TYPE_DESC:
            case ANYDATA_TYPE_DESC:
            case READONLY_TYPE_DESC:
                BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
                valueType.typeKind = typeKind;
                valueType.pos = getPosition(type);
                return valueType;
            default:
                BLangBuiltInRefTypeNode builtInValueType =
                        (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
                builtInValueType.typeKind = typeKind;
                builtInValueType.pos = getPosition(type);
                return builtInValueType;
        }
    }

    // Creates an untyped simple variable node with an optional initializer expression.
    private VariableNode createBasicVarNodeWithoutType(Location location, Set<Whitespace> ws, String identifier,
                                                       Location identifierLocation, ExpressionNode expr) {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.pos = location;
        IdentifierNode name = this.createIdentifier(identifierLocation, identifier, ws);
        ((BLangIdentifier) name).pos = identifierLocation;
        bLSimpleVar.setName(name);
        bLSimpleVar.addWS(ws);
        if (expr != null) {
            bLSimpleVar.setInitialExpression(expr);
        }
        return bLSimpleVar;
    }

    // Creates an invocation (or async action invocation) node for the given callee and arguments.
    private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
                                                  Location position, boolean isAsync) {
        BLangInvocation bLInvocation;
        if (isAsync) {
            bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
        } else {
            bLInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
        }
        BLangNameReference reference = createBLangNameReference(nameNode);
        bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
        bLInvocation.name = (BLangIdentifier) reference.name;
        List<BLangExpression> args = new ArrayList<>();
        arguments.iterator().forEachRemaining(arg -> args.add(createExpression(arg)));
bLInvocation.argExprs = args; bLInvocation.pos = position; return bLInvocation; } private BLangNameReference createBLangNameReference(Node node) { switch (node.kind()) { case QUALIFIED_NAME_REFERENCE: QualifiedNameReferenceNode iNode = (QualifiedNameReferenceNode) node; Token modulePrefix = iNode.modulePrefix(); IdentifierToken identifier = iNode.identifier(); BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix); Location namePos = getPosition(identifier); BLangIdentifier name = this.createIdentifier(namePos, identifier); return new BLangNameReference(getPosition(node), null, pkgAlias, name); case ERROR_TYPE_DESC: node = ((BuiltinSimpleNameReferenceNode) node).name(); break; case NEW_KEYWORD: case IDENTIFIER_TOKEN: case ERROR_KEYWORD: break; case SIMPLE_NAME_REFERENCE: default: node = ((SimpleNameReferenceNode) node).name(); break; } Token iToken = (Token) node; BLangIdentifier pkgAlias = this.createIdentifier(symTable.builtinPos, ""); BLangIdentifier name = this.createIdentifier(iToken); return new BLangNameReference(getPosition(node), null, pkgAlias, name); } private BLangMarkdownDocumentation createMarkdownDocumentationAttachment(Optional<Node> markdownDocumentationNode) { if (markdownDocumentationNode == null || !markdownDocumentationNode.isPresent()) { return null; } BLangMarkdownDocumentation doc = (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode(); LinkedList<BLangMarkdownDocumentationLine> documentationLines = new LinkedList<>(); LinkedList<BLangMarkdownParameterDocumentation> parameters = new LinkedList<>(); LinkedList<BLangMarkdownReferenceDocumentation> references = new LinkedList<>(); MarkdownDocumentationNode markdownDocNode = (MarkdownDocumentationNode) markdownDocumentationNode.get(); NodeList<Node> docLineList = markdownDocNode.documentationLines(); BLangMarkdownParameterDocumentation bLangParaDoc = null; BLangMarkdownReturnParameterDocumentation bLangReturnParaDoc = null; 
BLangMarkDownDeprecationDocumentation bLangDeprecationDoc = null; BLangMarkDownDeprecatedParametersDocumentation bLangDeprecatedParaDoc = null; for (Node singleDocLine : docLineList) { switch (singleDocLine.kind()) { case MARKDOWN_DOCUMENTATION_LINE: case MARKDOWN_REFERENCE_DOCUMENTATION_LINE: MarkdownDocumentationLineNode docLineNode = (MarkdownDocumentationLineNode) singleDocLine; NodeList<Node> docElements = docLineNode.documentElements(); String docText = addReferencesAndReturnDocumentationText(references, docElements); if (bLangDeprecationDoc != null) { bLangDeprecationDoc.deprecationDocumentationLines.add(docText); } else if (bLangReturnParaDoc != null) { bLangReturnParaDoc.returnParameterDocumentationLines.add(docText); } else if (bLangParaDoc != null) { bLangParaDoc.parameterDocumentationLines.add(docText); } else { BLangMarkdownDocumentationLine bLangDocLine = (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode(); bLangDocLine.text = docText; bLangDocLine.pos = getPosition(docLineNode); documentationLines.add(bLangDocLine); } break; case MARKDOWN_PARAMETER_DOCUMENTATION_LINE: bLangParaDoc = new BLangMarkdownParameterDocumentation(); MarkdownParameterDocumentationLineNode parameterDocLineNode = (MarkdownParameterDocumentationLineNode) singleDocLine; BLangIdentifier paraName = new BLangIdentifier(); Token parameterName = parameterDocLineNode.parameterName(); String parameterNameValue = parameterName.isMissing() ? 
"" : IdentifierUtils.unescapeUnicodeCodepoints(parameterName.text()); if (stringStartsWithSingleQuote(parameterNameValue)) { parameterNameValue = parameterNameValue.substring(1); } paraName.value = parameterNameValue; bLangParaDoc.parameterName = paraName; NodeList<Node> paraDocElements = parameterDocLineNode.documentElements(); String paraDocText = addReferencesAndReturnDocumentationText(references, paraDocElements); bLangParaDoc.parameterDocumentationLines.add(paraDocText); bLangParaDoc.pos = getPosition(parameterName); if (bLangDeprecatedParaDoc != null) { bLangDeprecatedParaDoc.parameters.add(bLangParaDoc); } else if (bLangDeprecationDoc != null) { bLangDeprecatedParaDoc = new BLangMarkDownDeprecatedParametersDocumentation(); bLangDeprecatedParaDoc.parameters.add(bLangParaDoc); bLangDeprecationDoc = null; } else { parameters.add(bLangParaDoc); } break; case MARKDOWN_RETURN_PARAMETER_DOCUMENTATION_LINE: bLangReturnParaDoc = new BLangMarkdownReturnParameterDocumentation(); MarkdownParameterDocumentationLineNode returnParaDocLineNode = (MarkdownParameterDocumentationLineNode) singleDocLine; NodeList<Node> returnParaDocElements = returnParaDocLineNode.documentElements(); String returnParaDocText = addReferencesAndReturnDocumentationText(references, returnParaDocElements); bLangReturnParaDoc.returnParameterDocumentationLines.add(returnParaDocText); bLangReturnParaDoc.pos = getPosition(returnParaDocLineNode); doc.returnParameter = bLangReturnParaDoc; break; case MARKDOWN_DEPRECATION_DOCUMENTATION_LINE: bLangDeprecationDoc = new BLangMarkDownDeprecationDocumentation(); MarkdownDocumentationLineNode deprecationDocLineNode = (MarkdownDocumentationLineNode) singleDocLine; String lineText = ((Token) deprecationDocLineNode.documentElements().get(0)).text(); bLangDeprecationDoc.addDeprecationLine(" bLangDeprecationDoc.pos = getPosition(deprecationDocLineNode); break; case MARKDOWN_CODE_BLOCK: MarkdownCodeBlockNode codeBlockNode = (MarkdownCodeBlockNode) singleDocLine; 
transformCodeBlock(documentationLines, codeBlockNode); break; default: break; } } doc.documentationLines = documentationLines; doc.parameters = parameters; doc.references = references; doc.deprecationDocumentation = bLangDeprecationDoc; doc.deprecatedParametersDocumentation = bLangDeprecatedParaDoc; doc.pos = getPosition(markdownDocNode); return doc; } private void transformCodeBlock(LinkedList<BLangMarkdownDocumentationLine> documentationLines, MarkdownCodeBlockNode codeBlockNode) { BLangMarkdownDocumentationLine bLangDocLine = (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode(); StringBuilder docText = new StringBuilder(); if (codeBlockNode.langAttribute().isPresent()) { docText.append(codeBlockNode.startBacktick().text()); docText.append(codeBlockNode.langAttribute().get().toString()); } else { docText.append(codeBlockNode.startBacktick().toString()); } codeBlockNode.codeLines().forEach(codeLine -> docText.append(codeLine.codeDescription().toString())); docText.append(codeBlockNode.endBacktick().text()); bLangDocLine.text = docText.toString(); bLangDocLine.pos = getPosition(codeBlockNode.startLineHashToken()); documentationLines.add(bLangDocLine); } private String addReferencesAndReturnDocumentationText(LinkedList<BLangMarkdownReferenceDocumentation> references, NodeList<Node> docElements) { StringBuilder docText = new StringBuilder(); for (Node element : docElements) { if (element.kind() == SyntaxKind.BALLERINA_NAME_REFERENCE) { BLangMarkdownReferenceDocumentation bLangRefDoc = new BLangMarkdownReferenceDocumentation(); BallerinaNameReferenceNode balNameRefNode = (BallerinaNameReferenceNode) element; bLangRefDoc.pos = getPosition(balNameRefNode); Token startBacktick = balNameRefNode.startBacktick(); Node backtickContent = balNameRefNode.nameReference(); Token endBacktick = balNameRefNode.endBacktick(); String contentString = backtickContent.isMissing() ? 
"" : backtickContent.toString(); bLangRefDoc.referenceName = contentString; bLangRefDoc.type = DocumentationReferenceType.BACKTICK_CONTENT; Optional<Token> referenceType = balNameRefNode.referenceType(); referenceType.ifPresent( refType -> { bLangRefDoc.type = stringToRefType(refType.text()); docText.append(refType.toString()); } ); transformDocumentationBacktickContent(backtickContent, bLangRefDoc); docText.append(startBacktick.isMissing() ? "" : startBacktick.text()); docText.append(contentString); docText.append(endBacktick.isMissing() ? "" : endBacktick.text()); references.add(bLangRefDoc); } else if (element.kind() == SyntaxKind.DOCUMENTATION_DESCRIPTION) { Token docDescription = (Token) element; docText.append(docDescription.text()); } else if (element.kind() == SyntaxKind.INLINE_CODE_REFERENCE) { InlineCodeReferenceNode inlineCodeRefNode = (InlineCodeReferenceNode) element; docText.append(inlineCodeRefNode.startBacktick().text()); docText.append(inlineCodeRefNode.codeReference().text()); docText.append(inlineCodeRefNode.endBacktick().text()); } } return trimLeftAtMostOne(docText.toString()); } private String trimLeftAtMostOne(String text) { int countToStrip = 0; if (!text.isEmpty() && Character.isWhitespace(text.charAt(0))) { countToStrip = 1; } return text.substring(countToStrip); } private void transformDocumentationBacktickContent(Node backtickContent, BLangMarkdownReferenceDocumentation bLangRefDoc) { QualifiedNameReferenceNode qualifiedRef; SimpleNameReferenceNode simpleRef; switch (backtickContent.kind()) { case CODE_CONTENT: bLangRefDoc.hasParserWarnings = true; break; case QUALIFIED_NAME_REFERENCE: qualifiedRef = (QualifiedNameReferenceNode) backtickContent; bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text(); bLangRefDoc.identifier = qualifiedRef.identifier().text(); break; case SIMPLE_NAME_REFERENCE: simpleRef = (SimpleNameReferenceNode) backtickContent; bLangRefDoc.identifier = simpleRef.name().text(); break; case FUNCTION_CALL: Node 
funcName = (((FunctionCallExpressionNode) backtickContent).functionName()); if (funcName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) { qualifiedRef = (QualifiedNameReferenceNode) funcName; bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text(); bLangRefDoc.identifier = qualifiedRef.identifier().text(); } else { simpleRef = (SimpleNameReferenceNode) funcName; bLangRefDoc.identifier = simpleRef.name().text(); } break; case METHOD_CALL: MethodCallExpressionNode methodCallExprNode = (MethodCallExpressionNode) backtickContent; bLangRefDoc.identifier = ((SimpleNameReferenceNode) methodCallExprNode.methodName()).name().text(); Node refName = methodCallExprNode.expression(); if (refName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) { qualifiedRef = (QualifiedNameReferenceNode) refName; bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text(); bLangRefDoc.typeName = qualifiedRef.identifier().text(); } else { simpleRef = (SimpleNameReferenceNode) refName; bLangRefDoc.typeName = simpleRef.name().text(); } break; default: throw new IllegalArgumentException("Invalid backtick content transformation"); } if (bLangRefDoc.identifier != null) { bLangRefDoc.identifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.identifier); if (stringStartsWithSingleQuote(bLangRefDoc.identifier)) { bLangRefDoc.identifier = bLangRefDoc.identifier.substring(1); } } if (bLangRefDoc.qualifier != null) { bLangRefDoc.qualifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.qualifier); if (stringStartsWithSingleQuote(bLangRefDoc.qualifier)) { bLangRefDoc.qualifier = bLangRefDoc.qualifier.substring(1); } } } private DocumentationReferenceType stringToRefType(String refTypeName) { switch (refTypeName) { case "type": return DocumentationReferenceType.TYPE; case "service": return DocumentationReferenceType.SERVICE; case "variable": return DocumentationReferenceType.VARIABLE; case "var": return DocumentationReferenceType.VAR; case "annotation": return 
DocumentationReferenceType.ANNOTATION; case "module": return DocumentationReferenceType.MODULE; case "function": return DocumentationReferenceType.FUNCTION; case "parameter": return DocumentationReferenceType.PARAMETER; case "const": return DocumentationReferenceType.CONST; default: return DocumentationReferenceType.BACKTICK_CONTENT; } } private Object getIntegerLiteral(Node literal, String nodeValue, SyntaxKind sign) { SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind(); if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) { return parseLong(literal, nodeValue, nodeValue, 10, sign, DiagnosticErrorCode.INTEGER_TOO_SMALL, DiagnosticErrorCode.INTEGER_TOO_LARGE); } else if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) { String processedNodeValue = nodeValue.toLowerCase().replace("0x", ""); return parseLong(literal, nodeValue, processedNodeValue, 16, sign, DiagnosticErrorCode.HEXADECIMAL_TOO_SMALL, DiagnosticErrorCode.HEXADECIMAL_TOO_LARGE); } return null; } private Object parseLong(Node literal, String originalNodeValue, String processedNodeValue, int radix, SyntaxKind sign, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { Location pos = getPosition(literal); if (sign == SyntaxKind.MINUS_TOKEN) { pos = new BLangDiagnosticLocation(pos.lineRange().filePath(), pos.lineRange().startLine().line(), pos.lineRange().endLine().line(), pos.lineRange().startLine().offset() - 1, pos.lineRange().endLine().offset()); dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } private String getHexNodeValue(String value) { if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private String fillWithZeros(String str) { while (str.length() < 4) { str = "0".concat(str); } return str; } private void markVariableWithFlag(BLangVariable variable, Flag 
flag) { variable.flagSet.add(flag); switch (variable.getKind()) { case TUPLE_VARIABLE: BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; for (BLangVariable var : tupleVariable.memberVariables) { markVariableWithFlag(var, flag); } if (tupleVariable.restVariable != null) { markVariableWithFlag(tupleVariable.restVariable, flag); } break; case RECORD_VARIABLE: BLangRecordVariable recordVariable = (BLangRecordVariable) variable; for (BLangRecordVariableKeyValue keyValue : recordVariable.variableList) { markVariableWithFlag(keyValue.getValue(), flag); } if (recordVariable.restParam != null) { markVariableWithFlag((BLangVariable) recordVariable.restParam, flag); } break; case ERROR_VARIABLE: BLangErrorVariable errorVariable = (BLangErrorVariable) variable; BLangSimpleVariable message = errorVariable.message; if (message != null) { markVariableWithFlag(message, flag); } BLangVariable cause = errorVariable.cause; if (cause != null) { markVariableWithFlag(cause, flag); } errorVariable.detail.forEach(entry -> markVariableWithFlag(entry.valueBindingPattern, flag)); if (errorVariable.restDetail != null) { markVariableWithFlag(errorVariable.restDetail, flag); } break; } } private boolean isSimpleLiteral(SyntaxKind syntaxKind) { switch (syntaxKind) { case STRING_LITERAL: case NUMERIC_LITERAL: case BOOLEAN_LITERAL: case NIL_LITERAL: case NULL_LITERAL: return true; default: return false; } } static boolean isType(SyntaxKind nodeKind) { switch (nodeKind) { case RECORD_TYPE_DESC: case OBJECT_TYPE_DESC: case NIL_TYPE_DESC: case OPTIONAL_TYPE_DESC: case ARRAY_TYPE_DESC: case INT_TYPE_DESC: case BYTE_TYPE_DESC: case FLOAT_TYPE_DESC: case DECIMAL_TYPE_DESC: case STRING_TYPE_DESC: case BOOLEAN_TYPE_DESC: case XML_TYPE_DESC: case JSON_TYPE_DESC: case HANDLE_TYPE_DESC: case ANY_TYPE_DESC: case ANYDATA_TYPE_DESC: case NEVER_TYPE_DESC: case VAR_TYPE_DESC: case SERVICE_TYPE_DESC: case MAP_TYPE_DESC: case UNION_TYPE_DESC: case ERROR_TYPE_DESC: case STREAM_TYPE_DESC: case 
TABLE_TYPE_DESC: case FUNCTION_TYPE_DESC: case TUPLE_TYPE_DESC: case PARENTHESISED_TYPE_DESC: case READONLY_TYPE_DESC: case DISTINCT_TYPE_DESC: case INTERSECTION_TYPE_DESC: case SINGLETON_TYPE_DESC: case TYPE_REFERENCE_TYPE_DESC: return true; default: return false; } } private boolean isNumericLiteral(SyntaxKind syntaxKind) { switch (syntaxKind) { case NUMERIC_LITERAL: return true; default: return false; } } private boolean isPresent(Node node) { return node.kind() != SyntaxKind.NONE; } private boolean checkIfAnonymous(Node node) { SyntaxKind parentKind = node.parent().kind(); return parentKind != SyntaxKind.DISTINCT_TYPE_DESC && parentKind != SyntaxKind.TYPE_DEFINITION; } private boolean ifInLocalContext(Node parent) { while (parent != null) { if (parent instanceof StatementNode) { return true; } parent = parent.parent(); } return false; } private BLangType createAnonymousRecordType(RecordTypeDescriptorNode recordTypeDescriptorNode, BLangRecordTypeNode recordTypeNode) { BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition(); Location pos = getPosition(recordTypeDescriptorNode); String genName = anonymousModelHelper.getNextAnonymousTypeKey(this.packageID); IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos, genName, null); typeDef.setName(anonTypeGenName); typeDef.flagSet.add(Flag.PUBLIC); typeDef.flagSet.add(Flag.ANONYMOUS); typeDef.typeNode = recordTypeNode; typeDef.pos = pos; addToTop(typeDef); return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name); } private BLangUserDefinedType createUserDefinedType(Location pos, BLangIdentifier pkgAlias, BLangIdentifier name) { BLangUserDefinedType userDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode(); userDefinedType.pos = pos; userDefinedType.pkgAlias = pkgAlias; userDefinedType.typeName = name; return userDefinedType; } private boolean withinByteRange(Object num) { if (num instanceof Long) { return 
(Long) num <= 255 && (Long) num >= 0; } return false; } private class SimpleVarBuilder { private BLangIdentifier name; private BLangType type; private boolean isDeclaredWithVar; private Set<Flag> flags = new HashSet<>(); private boolean isFinal; private ExpressionNode expr; private Location pos; public BLangSimpleVariable build() { BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode(); bLSimpleVar.setName(this.name); bLSimpleVar.setTypeNode(this.type); bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar; bLSimpleVar.setTypeNode(this.type); bLSimpleVar.flagSet.addAll(this.flags); if (this.isFinal) { markVariableWithFlag(bLSimpleVar, Flag.FINAL); } bLSimpleVar.setInitialExpression(this.expr); bLSimpleVar.pos = pos; return bLSimpleVar; } public SimpleVarBuilder with(String name) { this.name = createIdentifier(null, name); return this; } public SimpleVarBuilder with(String name, Location identifierPos) { this.name = createIdentifier(identifierPos, name); return this; } public SimpleVarBuilder with(Token token) { this.name = createIdentifier(token); return this; } public SimpleVarBuilder setTypeByNode(Node typeName) { this.isDeclaredWithVar = typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC; if (typeName == null) { return this; } this.type = createTypeNode(typeName); return this; } public SimpleVarBuilder setExpressionByNode(Node initExprNode) { this.expr = initExprNode != null ? 
createExpression(initExprNode) : null; return this; } public SimpleVarBuilder setExpression(ExpressionNode expression) { this.expr = expression; return this; } public SimpleVarBuilder isDeclaredWithVar() { this.isDeclaredWithVar = true; return this; } public SimpleVarBuilder isFinal() { this.isFinal = true; return this; } public SimpleVarBuilder isListenerVar() { this.flags.add(Flag.LISTENER); this.flags.add(Flag.FINAL); return this; } public SimpleVarBuilder setVisibility(Token visibilityQualifier) { if (visibilityQualifier != null) { if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) { this.flags.add(Flag.PRIVATE); } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) { this.flags.add(Flag.PUBLIC); } } return this; } public SimpleVarBuilder setFinal(boolean present) { this.isFinal = present; return this; } public SimpleVarBuilder setOptional(boolean present) { if (present) { this.flags.add(Flag.PUBLIC); } else { this.flags.remove(Flag.PUBLIC); } return this; } public SimpleVarBuilder setRequired(boolean present) { if (present) { this.flags.add(Flag.REQUIRED); } else { this.flags.remove(Flag.REQUIRED); } return this; } public SimpleVarBuilder isPublic() { this.flags.add(Flag.PUBLIC); return this; } public SimpleVarBuilder isWorkerVar() { this.flags.add(Flag.WORKER); return this; } public SimpleVarBuilder setPos(Location pos) { this.pos = pos; return this; } } private void addFinalQualifier(BLangSimpleVariable simpleVar) { simpleVar.flagSet.add(Flag.FINAL); } private void addToTop(TopLevelNode topLevelNode) { if (currentCompilationUnit != null) { currentCompilationUnit.addTopLevelNode(topLevelNode); } } private Location expandLeft(Location location, Location upTo) { assert location.lineRange().startLine().line() > upTo.lineRange().startLine().line() || (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() && location.lineRange().startLine().offset() >= upTo.lineRange().startLine().offset()); Location 
expandedLocation = new BLangDiagnosticLocation(location.lineRange().filePath(), upTo.lineRange().startLine().line(), location.lineRange().endLine().line(), upTo.lineRange().startLine().offset(), location.lineRange().endLine().offset()); return expandedLocation; } private Location trimLeft(Location location, Location upTo) { assert location.lineRange().startLine().line() < upTo.lineRange().startLine().line() || (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() && location.lineRange().startLine().offset() <= upTo.lineRange().startLine().offset()); Location trimmedLocation = new BLangDiagnosticLocation(location.lineRange().filePath(), upTo.lineRange().startLine().line(), location.lineRange().endLine().line(), upTo.lineRange().startLine().offset(), location.lineRange().endLine().offset()); return trimmedLocation; } private Location trimRight(Location location, Location upTo) { assert location.lineRange().endLine().line() > upTo.lineRange().endLine().line() || (location.lineRange().endLine().line() == upTo.lineRange().endLine().line() && location.lineRange().endLine().offset() >= upTo.lineRange().endLine().offset()); Location trimmedLocation = new BLangDiagnosticLocation(location.lineRange().filePath(), location.lineRange().startLine().line(), upTo.lineRange().endLine().line(), location.lineRange().startLine().offset(), upTo.lineRange().endLine().offset()); return trimmedLocation; } private void setClassQualifiers(NodeList<Token> qualifiers, BLangClassDefinition blangClass) { for (Token qualifier : qualifiers) { SyntaxKind kind = qualifier.kind(); switch (kind) { case DISTINCT_KEYWORD: blangClass.flagSet.add(Flag.DISTINCT); break; case CLIENT_KEYWORD: blangClass.flagSet.add(Flag.CLIENT); break; case READONLY_KEYWORD: blangClass.flagSet.add(Flag.READONLY); break; case SERVICE_KEYWORD: blangClass.flagSet.add(Flag.SERVICE); break; case ISOLATED_KEYWORD: blangClass.flagSet.add(Flag.ISOLATED); break; default: throw new RuntimeException("Syntax 
kind is not supported: " + kind); } } } }
Yes, it does. If we see flakiness later, we can add some idle time.
/**
 * Verifies that a valid query returns matching digital twins: creates a room model and a
 * room twin, queries for occupied twins, and asserts every returned twin has
 * {@code IsOccupied == true}. Created resources are deleted in the finally block.
 */
public void validQuerySucceeds(HttpClient httpClient, DigitalTwinsServiceVersion serviceVersion) {
    DigitalTwinsAsyncClient client = getAsyncClient(httpClient, serviceVersion);

    // Unique ids so parallel runs do not collide; the floor id is only embedded
    // in the room model payload, no floor model is created here.
    String floorModelId = UniqueIdHelper.getUniqueModelId(
            TestAssetDefaults.FLOOR_MODEL_ID_PREFIX, client, randomIntegerStringGenerator);
    String roomModelId = UniqueIdHelper.getUniqueModelId(
            TestAssetDefaults.ROOM_MODEL_ID_PREFIX, client, randomIntegerStringGenerator);
    String roomTwinId = UniqueIdHelper.getUniqueDigitalTwinId(
            TestAssetDefaults.ROOM_TWIN_ID_PREFIX, client, randomIntegerStringGenerator);

    try {
        // Arrange: create the room model.
        String roomModelPayload = TestAssetsHelper.getRoomModelPayload(roomModelId, floorModelId);
        StepVerifier
                .create(client.createModels(new ArrayList<>(Arrays.asList(roomModelPayload))))
                .assertNext(createdModels -> assertThat(createdModels.size())
                        .as("Created models successfully")
                        .isEqualTo(1))
                .verifyComplete();

        // Arrange: create a twin of that model.
        String roomTwin = TestAssetsHelper.getRoomTwinPayload(roomModelId);
        StepVerifier
                .create(client.createDigitalTwinWithResponse(roomTwinId, roomTwin))
                .assertNext(twinResponse -> assertThat(twinResponse.getStatusCode())
                        .as("Created digitaltwin successfully")
                        .isEqualTo(HttpURLConnection.HTTP_OK))
                .verifyComplete();

        // Act + assert: every twin matched by the query must report IsOccupied = true.
        String queryString = "SELECT * FROM digitaltwins where IsOccupied = true";
        StepVerifier
                .create(client.query(queryString, BasicDigitalTwin.class))
                .thenConsumeWhile(twin -> {
                    assertThat(twin.getCustomProperties().get("IsOccupied"))
                            .as("IsOccupied should be true")
                            .isEqualTo(true);
                    return true;
                })
                .verifyComplete();
    } finally {
        // Cleanup is best-effort: delete the twin before its model, and report
        // (rather than mask) any cleanup failure.
        try {
            if (roomTwinId != null) {
                client.deleteDigitalTwin(roomTwinId).block();
            }
            if (roomModelId != null) {
                client.deleteModel(roomModelId).block();
            }
        } catch (Exception ex) {
            fail("Failed to cleanup due to: ", ex);
        }
    }
}
StepVerifier.create(asyncClient.query(queryString, BasicDigitalTwin.class))
/**
 * Verifies that a valid query returns matching digital twins.
 * <p>
 * Flow: create a room model, create a room twin of that model, query for twins with
 * {@code IsOccupied = true}, and assert every result reports {@code IsOccupied == true}.
 * Created resources are deleted in the finally block.
 */
public void validQuerySucceeds(HttpClient httpClient, DigitalTwinsServiceVersion serviceVersion) {
    DigitalTwinsAsyncClient asyncClient = getAsyncClient(httpClient, serviceVersion);
    // Unique ids avoid collisions between runs; floorModelId is only referenced inside
    // the room model payload — no separate floor model is created by this test.
    String floorModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.FLOOR_MODEL_ID_PREFIX,
        asyncClient, randomIntegerStringGenerator);
    String roomModelId = UniqueIdHelper.getUniqueModelId(TestAssetDefaults.ROOM_MODEL_ID_PREFIX,
        asyncClient, randomIntegerStringGenerator);
    String roomTwinId = UniqueIdHelper.getUniqueDigitalTwinId(TestAssetDefaults.ROOM_TWIN_ID_PREFIX,
        asyncClient, randomIntegerStringGenerator);
    try {
        // Create the room model and expect exactly one model in the response.
        String roomModelPayload = TestAssetsHelper.getRoomModelPayload(roomModelId, floorModelId);
        StepVerifier.create(asyncClient.createModels(new ArrayList<>(Arrays.asList(roomModelPayload))))
            .assertNext(response -> assertThat(response.size())
                .as("Created models successfully")
                .isEqualTo(1))
            .verifyComplete();
        // Create a twin of the room model; expect HTTP 200.
        String roomTwin = TestAssetsHelper.getRoomTwinPayload(roomModelId);
        StepVerifier.create(asyncClient.createDigitalTwinWithResponse(roomTwinId, roomTwin))
            .assertNext(response -> assertThat(response.getStatusCode())
                .as("Created digitaltwin successfully")
                .isEqualTo(HttpURLConnection.HTTP_OK))
            .verifyComplete();
        // Query and check the IsOccupied property on every returned twin.
        String queryString = "SELECT * FROM digitaltwins where IsOccupied = true";
        StepVerifier.create(asyncClient.query(queryString, BasicDigitalTwin.class))
            .thenConsumeWhile(dt -> {
                assertThat(dt.getCustomProperties().get("IsOccupied"))
                    .as("IsOccupied should be true")
                    .isEqualTo(true);
                return true;
            })
            .verifyComplete();
    } finally {
        // Cleanup: twin first, then its model; a cleanup failure fails the test explicitly.
        try {
            if (roomTwinId != null) {
                asyncClient.deleteDigitalTwin(roomTwinId).block();
            }
            if (roomModelId != null) {
                asyncClient.deleteModel(roomModelId).block();
            }
        } catch (Exception ex) {
            fail("Failed to cleanup due to: ", ex);
        }
    }
}
class QueryAsyncTests extends QueryTestBase{ private final ClientLogger logger = new ClientLogger(ComponentsTests.class); @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.digitaltwins.core.TestHelper @Override }
class QueryAsyncTests extends QueryTestBase{ private final ClientLogger logger = new ClientLogger(ComponentsTests.class); @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.digitaltwins.core.TestHelper @Override }
These are [already cleared](https://github.com/ballerina-platform/ballerina-lang/blob/274e1dca660da80cc2854bbba37cec60a9b51acb/compiler/ballerina-lang/src/main/java/org/wso2/ballerinalang/compiler/semantics/analyzer/IsolationAnalyzer.java#L3527) at the end of this method though, via [`inferIsolation`](https://github.com/ballerina-platform/ballerina-lang/blob/274e1dca660da80cc2854bbba37cec60a9b51acb/compiler/ballerina-lang/src/main/java/org/wso2/ballerinalang/compiler/semantics/analyzer/IsolationAnalyzer.java#L332).
/**
 * Runs isolation analysis over the given package: resets per-run inference state,
 * collects module-level variable and class candidates for isolation inference,
 * walks the package AST, infers isolation, and logs service isolation warnings.
 *
 * @param pkgNode the package to analyze
 * @return the same package node, after analysis
 */
public BLangPackage analyze(BLangPackage pkgNode) {
    // Reset inference state from any previous compilation unit.
    // NOTE(review): these maps are reportedly also cleared at the end of this
    // method via inferIsolation, so this upfront clear may be redundant — confirm.
    this.arrowFunctionTempSymbolMap.clear();
    this.isolationInferenceInfoMap.clear();

    // Point diagnostics at the package being analyzed.
    this.dlog.setCurrentPackageId(pkgNode.packageID);
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);

    // Seed inference candidates: module-level vars and non-public classes.
    Set<BSymbol> moduleLevelVarSymbols = getModuleLevelVarSymbols(pkgNode.globalVars);
    populateNonPublicMutableOrNonIsolatedVars(moduleLevelVarSymbols);
    List<BLangClassDefinition> classDefinitions = pkgNode.classDefinitions;
    populateNonPublicIsolatedInferableClasses(classDefinitions);

    // Walk the package AST, then run inference and report warnings.
    analyzeNode(pkgNode, pkgEnv);
    inferIsolation(moduleLevelVarSymbols, getPubliclyExposedObjectTypes(pkgNode), classDefinitions);
    logServiceIsolationWarnings(classDefinitions);
    return pkgNode;
}
this.isolationInferenceInfoMap.clear();
/**
 * Performs isolation analysis for a package.
 * Clears stale inference state, registers inference candidates (module-level
 * variables and non-public classes), visits the package tree, infers isolation,
 * and emits service isolation warnings.
 *
 * @param pkgNode package under analysis
 * @return the analyzed package node
 */
public BLangPackage analyze(BLangPackage pkgNode) {
    // Direct subsequent diagnostics to this package.
    this.dlog.setCurrentPackageId(pkgNode.packageID);

    // Drop inference state left over from a previous run.
    this.arrowFunctionTempSymbolMap.clear();
    this.isolationInferenceInfoMap.clear();

    SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);

    // Candidate module-level variables for isolation inference.
    Set<BSymbol> moduleVarSymbols = getModuleLevelVarSymbols(pkgNode.globalVars);
    populateNonPublicMutableOrNonIsolatedVars(moduleVarSymbols);

    // Candidate classes for isolation inference.
    List<BLangClassDefinition> classDefs = pkgNode.classDefinitions;
    populateNonPublicIsolatedInferableClasses(classDefs);

    analyzeNode(pkgNode, packageEnv);

    inferIsolation(moduleVarSymbols, getPubliclyExposedObjectTypes(pkgNode), classDefs);
    logServiceIsolationWarnings(classDefs);

    return pkgNode;
}
class IsolationAnalyzer extends BLangNodeVisitor { private static final CompilerContext.Key<IsolationAnalyzer> ISOLATION_ANALYZER_KEY = new CompilerContext.Key<>(); private static final String VALUE_LANG_LIB = "lang.value"; private static final String CLONE_LANG_LIB_METHOD = "clone"; private static final String CLONE_READONLY_LANG_LIB_METHOD = "cloneReadOnly"; private SymbolEnv env; private final SymbolTable symTable; private final SymbolResolver symResolver; private final Names names; private final Types types; private final BLangDiagnosticLog dlog; private boolean inferredIsolated = true; private boolean inLockStatement = false; private final Stack<LockInfo> copyInLockInfoStack = new Stack<>(); private final Stack<Set<BSymbol>> isolatedLetVarStack = new Stack<>(); private final Map<BSymbol, IsolationInferenceInfo> isolationInferenceInfoMap = new HashMap<>(); private final Map<BLangArrowFunction, BInvokableSymbol> arrowFunctionTempSymbolMap = new HashMap<>(); private IsolationAnalyzer(CompilerContext context) { context.put(ISOLATION_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.names = Names.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); } public static IsolationAnalyzer getInstance(CompilerContext context) { IsolationAnalyzer isolationAnalyzer = context.get(ISOLATION_ANALYZER_KEY); if (isolationAnalyzer == null) { isolationAnalyzer = new IsolationAnalyzer(context); } return isolationAnalyzer; } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; if (node != null) { node.accept(this); } this.env = prevEnv; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.ISOLATION_ANALYZE)) { return; } for (BLangTypeDefinition typeDefinition : pkgNode.typeDefinitions) { analyzeNode(typeDefinition.typeNode, env); } for 
(BLangClassDefinition classDefinition : pkgNode.classDefinitions) { if (classDefinition.flagSet.contains(Flag.ANONYMOUS) && isIsolated(classDefinition.getBType().flags)) { classDefinition.flagSet.add(Flag.ISOLATED); classDefinition.symbol.flags |= Flags.ISOLATED; } analyzeNode(classDefinition, env); } for (BLangFunction function : pkgNode.functions) { analyzeNode(function, env); } for (BLangVariable globalVar : pkgNode.globalVars) { analyzeNode(globalVar, env); } for (BLangTestablePackage testablePkg : pkgNode.testablePkgs) { analyze(testablePkg); } pkgNode.completedPhases.add(CompilerPhase.ISOLATION_ANALYZE); } @Override public void visit(BLangCompilationUnit compUnit) { } @Override public void visit(BLangImportPackage importPkgNode) { } @Override public void visit(BLangXMLNS xmlnsNode) { } @Override public void visit(BLangResourceFunction funcNode) { visit((BLangFunction) funcNode); } @Override public void visit(BLangFunction funcNode) { boolean prevInferredIsolated = this.inferredIsolated; this.inferredIsolated = true; IsolationInferenceInfo functionIsolationInferenceInfo = null; BInvokableSymbol symbol = funcNode.symbol; if (isIsolationInferableFunction(funcNode) && !isolationInferenceInfoMap.containsKey(symbol)) { functionIsolationInferenceInfo = new IsolationInferenceInfo(); isolationInferenceInfoMap.put(symbol, functionIsolationInferenceInfo); } SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, symbol.scope, env); for (BLangSimpleVariable requiredParam : funcNode.requiredParams) { if (!requiredParam.symbol.isDefaultable) { continue; } analyzeNode(requiredParam.expr, funcEnv); } analyzeNode(funcNode.body, funcEnv); if (this.inferredIsolated && !isIsolated(symbol.flags) && !Symbols.isFlagOn(symbol.flags, Flags.WORKER) && functionIsolationInferenceInfo != null && functionIsolationInferenceInfo.dependsOnlyOnInferableConstructs && !funcNode.objInitFunction) { functionIsolationInferenceInfo.inferredIsolated = true; } this.inferredIsolated = 
this.inferredIsolated && prevInferredIsolated; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); for (BLangStatement statement : body.stmts) { analyzeNode(statement, bodyEnv); } } @Override public void visit(BLangExprFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); analyzeNode(body.expr, bodyEnv); } @Override public void visit(BLangExternalFunctionBody body) { markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; } @Override public void visit(BLangService serviceNode) { } @Override public void visit(BLangTypeDefinition typeDefinition) { analyzeNode(typeDefinition.typeNode, env); } @Override public void visit(BLangConstant constant) { BLangType typeNode = constant.typeNode; if (typeNode != null) { analyzeNode(typeNode, env); } analyzeNode(constant.expr, env); } @Override public void visit(BLangSimpleVariable varNode) { BLangType typeNode = varNode.typeNode; if (typeNode != null && (typeNode.getBType() == null || typeNode.getBType().tsymbol == null || typeNode.getBType().tsymbol.owner.getKind() != SymbolKind.PACKAGE)) { analyzeNode(typeNode, env); } BVarSymbol symbol = varNode.symbol; var flags = symbol.flags; BLangExpression expr = varNode.expr; BType fieldType = varNode.getBType(); boolean isolatedClassField = isIsolatedClassField(); if (isolatedClassField && isExpectedToBeAPrivateField(symbol, fieldType) && !Symbols.isFlagOn(flags, Flags.PRIVATE)) { dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_NON_PRIVATE_MUTABLE_FIELD_IN_ISOLATED_OBJECT); } if (expr == null) { return; } if (isolatedClassField || varNode.flagSet.contains(Flag.ISOLATED)) { validateIsolatedExpression(fieldType, expr); } analyzeNode(expr, env); BSymbol owner = symbol.owner; if (owner != null && ((owner.tag & SymTag.LET) == SymTag.LET) && isIsolatedExpression(expr)) { isolatedLetVarStack.peek().add(symbol); } if (Symbols.isFlagOn(flags, Flags.WORKER)) { 
markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; if (isInIsolatedFunction(env.enclInvokable)) { dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_WORKER_DECLARATION_IN_ISOLATED_FUNCTION); } } } @Override public void visit(BLangIdentifier identifierNode) { } @Override public void visit(BLangAnnotation annotationNode) { } @Override public void visit(BLangAnnotationAttachment annAttachmentNode) { BLangExpression expr = annAttachmentNode.expr; if (expr != null) { analyzeNode(expr, env); } } @Override public void visit(BLangBlockStmt blockNode) { SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); for (BLangStatement statement : blockNode.stmts) { analyzeNode(statement, blockEnv); } } @Override public void visit(BLangSimpleVariableDef varDefNode) { BLangVariable var = varDefNode.var; if (var.expr == null) { if (var.typeNode != null) { analyzeNode(var.typeNode, env); } return; } analyzeNode(var, env); } @Override public void visit(BLangAssignment assignNode) { BLangExpression varRef = assignNode.varRef; analyzeNode(varRef, env); BLangExpression expr = assignNode.expr; analyzeNode(expr, env); BLangInvokableNode enclInvokable = env.enclInvokable; if (varRef.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { BLangFieldBasedAccess fieldAccess = (BLangFieldBasedAccess) varRef; if (enclInvokable != null && enclInvokable.getKind() == NodeKind.FUNCTION && ((BLangFunction) enclInvokable).objInitFunction && isIsolatedObjectFieldOrMethodAccessViaSelf(fieldAccess, false)) { validateIsolatedExpression( ((BObjectType) enclInvokable.symbol.owner.type).fields.get(fieldAccess.field.value).type, expr); } } validateTransferOut(varRef, expr); } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { analyzeNode(compoundAssignNode.varRef, env); analyzeNode(compoundAssignNode.expr, env); } @Override public void visit(BLangRetry retryNode) { analyzeNode(retryNode.retrySpec, env); analyzeNode(retryNode.retryBody, env); } @Override public 
void visit(BLangRetryTransaction retryTransaction) { analyzeNode(retryTransaction.retrySpec, env); analyzeNode(retryTransaction.transaction, env); } @Override public void visit(BLangRetrySpec retrySpec) { for (BLangExpression argExpr : retrySpec.argExprs) { analyzeNode(argExpr, env); } } @Override public void visit(BLangContinue continueNode) { } @Override public void visit(BLangBreak breakNode) { } @Override public void visit(BLangReturn returnNode) { BLangExpression expr = returnNode.expr; analyzeNode(expr, env); if (!this.inLockStatement) { return; } validateTransferOut(expr, this.copyInLockInfoStack.peek().nonIsolatedTransferOutExpressions); } @Override public void visit(BLangPanic panicNode) { analyzeNode(panicNode.expr, env); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { analyzeNode(xmlnsStmtNode.xmlnsDecl, env); } @Override public void visit(BLangExpressionStmt exprStmtNode) { analyzeNode(exprStmtNode.expr, env); } @Override public void visit(BLangIf ifNode) { analyzeNode(ifNode.expr, env); analyzeNode(ifNode.body, env); analyzeNode(ifNode.elseStmt, env); } @Override public void visit(BLangQueryAction queryAction) { for (BLangNode clause : queryAction.getQueryClauses()) { analyzeNode(clause, env); } analyzeNode(queryAction.doClause, env); } @Override public void visit(BLangMatch matchNode) { analyzeNode(matchNode.expr, env); for (BLangMatch.BLangMatchBindingPatternClause patternClause : matchNode.patternClauses) { analyzeNode(patternClause, env); } } @Override public void visit(BLangMatch.BLangMatchTypedBindingPatternClause patternClauseNode) { analyzeNode(patternClauseNode.variable, env); analyzeNode(patternClauseNode.body, env); } @Override public void visit(BLangMatchStatement matchStatement) { analyzeNode(matchStatement.expr, env); for (BLangMatchClause matchClause : matchStatement.matchClauses) { analyzeNode(matchClause, env); } if (matchStatement.onFailClause != null) { analyzeNode(matchStatement.onFailClause, env); } } @Override 
public void visit(BLangMatchGuard matchGuard) { analyzeNode(matchGuard.expr, env); } @Override public void visit(BLangConstPattern constMatchPattern) { analyzeNode(constMatchPattern.expr, env); } @Override public void visit(BLangWildCardMatchPattern wildCardMatchPattern) { } @Override public void visit(BLangListMatchPattern listMatchPattern) { for (BLangMatchPattern matchPattern : listMatchPattern.matchPatterns) { analyzeNode(matchPattern, env); } if (listMatchPattern.restMatchPattern != null) { analyzeNode(listMatchPattern.restMatchPattern, env); } } @Override public void visit(BLangRestMatchPattern restMatchPattern) { } @Override public void visit(BLangMappingMatchPattern mappingMatchPattern) { for (BLangFieldMatchPattern fieldMatchPattern : mappingMatchPattern.fieldMatchPatterns) { analyzeNode(fieldMatchPattern, env); } } @Override public void visit(BLangFieldMatchPattern fieldMatchPattern) { analyzeNode(fieldMatchPattern.fieldName, env); analyzeNode(fieldMatchPattern.matchPattern, env); } @Override public void visit(BLangWildCardBindingPattern wildCardBindingPattern) { } @Override public void visit(BLangVarBindingPatternMatchPattern varBindingPattern) { analyzeNode(varBindingPattern.getBindingPattern(), env); } @Override public void visit(BLangCaptureBindingPattern captureBindingPattern) { } @Override public void visit(BLangErrorBindingPattern errorBindingPattern) { analyzeNode(errorBindingPattern.errorMessageBindingPattern, env); analyzeNode(errorBindingPattern.errorCauseBindingPattern, env); analyzeNode(errorBindingPattern.errorFieldBindingPatterns, env); } @Override public void visit(BLangErrorMessageBindingPattern errorMessageBindingPattern) { analyzeNode(errorMessageBindingPattern.simpleBindingPattern, env); } @Override public void visit(BLangSimpleBindingPattern simpleBindingPattern) { analyzeNode(simpleBindingPattern.wildCardBindingPattern, env); analyzeNode(simpleBindingPattern.captureBindingPattern, env); } @Override public void 
visit(BLangErrorCauseBindingPattern errorCauseBindingPattern) { analyzeNode(errorCauseBindingPattern.simpleBindingPattern, env); analyzeNode(errorCauseBindingPattern.errorBindingPattern, env); } @Override public void visit(BLangErrorFieldBindingPatterns errorFieldBindingPatterns) { for (BLangNamedArgBindingPattern namedArgBindingPattern : errorFieldBindingPatterns.namedArgBindingPatterns) { analyzeNode(namedArgBindingPattern, env); } analyzeNode(errorFieldBindingPatterns.restBindingPattern, env); } @Override public void visit(BLangNamedArgBindingPattern namedArgBindingPattern) { analyzeNode(namedArgBindingPattern.argName, env); analyzeNode(namedArgBindingPattern.bindingPattern, env); } @Override public void visit(BLangErrorMatchPattern errorMatchPattern) { analyzeNode(errorMatchPattern.errorMessageMatchPattern, env); analyzeNode(errorMatchPattern.errorCauseMatchPattern, env); analyzeNode(errorMatchPattern.errorFieldMatchPatterns, env); } @Override public void visit(BLangErrorMessageMatchPattern errorMessageMatchPattern) { analyzeNode(errorMessageMatchPattern.simpleMatchPattern, env); } @Override public void visit(BLangSimpleMatchPattern simpleMatchPattern) { analyzeNode(simpleMatchPattern.wildCardMatchPattern, env); analyzeNode(simpleMatchPattern.constPattern, env); analyzeNode(simpleMatchPattern.varVariableName, env); } @Override public void visit(BLangErrorCauseMatchPattern errorCauseMatchPattern) { analyzeNode(errorCauseMatchPattern.simpleMatchPattern, env); analyzeNode(errorCauseMatchPattern.errorMatchPattern, env); } @Override public void visit(BLangErrorFieldMatchPatterns errorFieldMatchPatterns) { for (BLangNamedArgMatchPattern namedArgMatchPattern : errorFieldMatchPatterns.namedArgMatchPatterns) { analyzeNode(namedArgMatchPattern, env); } analyzeNode(errorFieldMatchPatterns.restMatchPattern, env); } @Override public void visit(BLangNamedArgMatchPattern namedArgMatchPattern) { analyzeNode(namedArgMatchPattern.argName, env); 
analyzeNode(namedArgMatchPattern.matchPattern, env); } @Override public void visit(BLangListBindingPattern listBindingPattern) { for (BLangBindingPattern bindingPattern : listBindingPattern.bindingPatterns) { analyzeNode(bindingPattern, env); } } @Override public void visit(BLangRestBindingPattern restBindingPattern) { } @Override public void visit(BLangMappingBindingPattern mappingBindingPattern) { for (BLangFieldBindingPattern fieldBindingPattern : mappingBindingPattern.fieldBindingPatterns) { analyzeNode(fieldBindingPattern, env); } } @Override public void visit(BLangFieldBindingPattern fieldBindingPattern) { analyzeNode(fieldBindingPattern.fieldName, env); analyzeNode(fieldBindingPattern.bindingPattern, env); } @Override public void visit(BLangMatchClause matchClause) { for (BLangMatchPattern matchPattern : matchClause.matchPatterns) { analyzeNode(matchPattern, env); } BLangMatchGuard matchGuard = matchClause.matchGuard; if (matchGuard != null) { analyzeNode(matchGuard, env); } analyzeNode(matchClause.blockStmt, env); } @Override public void visit(BLangForeach foreach) { analyzeNode(foreach.collection, env); analyzeNode(foreach.body, env); BLangOnFailClause onFailClause = foreach.onFailClause; if (onFailClause != null) { analyzeNode(onFailClause, env); } } @Override public void visit(BLangFromClause fromClause) { SymbolEnv fromEnv = fromClause.env; analyzeNode((BLangNode) fromClause.getVariableDefinitionNode(), fromEnv); analyzeNode(fromClause.collection, fromEnv); } @Override public void visit(BLangJoinClause joinClause) { SymbolEnv joinEnv = joinClause.env; analyzeNode((BLangNode) joinClause.getVariableDefinitionNode(), joinEnv); analyzeNode(joinClause.collection, joinEnv); analyzeNode((BLangNode) joinClause.onClause, joinEnv); } @Override public void visit(BLangLetClause letClause) { SymbolEnv letClauseEnv = letClause.env; for (BLangLetVariable letVarDeclaration : letClause.letVarDeclarations) { analyzeNode((BLangNode) letVarDeclaration.definitionNode, 
letClauseEnv); } } @Override public void visit(BLangOnClause onClause) { analyzeNode(onClause.lhsExpr, env); analyzeNode(onClause.rhsExpr, env); } @Override public void visit(BLangOrderKey orderKeyClause) { analyzeNode(orderKeyClause.expression, env); } @Override public void visit(BLangOrderByClause orderByClause) { SymbolEnv orderByEnv = orderByClause.env; for (OrderKeyNode orderKeyNode : orderByClause.orderByKeyList) { analyzeNode((BLangExpression) orderKeyNode.getOrderKey(), orderByEnv); } } @Override public void visit(BLangSelectClause selectClause) { analyzeNode(selectClause.expression, selectClause.env); } @Override public void visit(BLangWhereClause whereClause) { analyzeNode(whereClause.expression, whereClause.env); } @Override public void visit(BLangDoClause doClause) { analyzeNode(doClause.body, doClause.env); } @Override public void visit(BLangOnFailClause onFailClause) { analyzeNode(onFailClause.body, env); } @Override public void visit(BLangOnConflictClause onConflictClause) { analyzeNode(onConflictClause.expression, env); } @Override public void visit(BLangLimitClause limitClause) { analyzeNode(limitClause.expression, env); } @Override public void visit(BLangWhile whileNode) { analyzeNode(whileNode.expr, env); analyzeNode(whileNode.body, env); BLangOnFailClause onFailClause = whileNode.onFailClause; if (onFailClause != null) { analyzeNode(onFailClause, env); } } @Override public void visit(BLangLock lockNode) { boolean prevInLockStatement = this.inLockStatement; this.inLockStatement = true; copyInLockInfoStack.push(new LockInfo(lockNode)); analyzeNode(lockNode.body, SymbolEnv.createLockEnv(lockNode, env)); LockInfo copyInLockInfo = copyInLockInfoStack.pop(); this.inLockStatement = prevInLockStatement; BLangOnFailClause onFailClause = lockNode.onFailClause; if (onFailClause != null) { analyzeNode(onFailClause, env); } Map<BSymbol, List<BLangSimpleVarRef>> accessedRestrictedVars = copyInLockInfo.accessedRestrictedVars; Set<BSymbol> 
accessedRestrictedVarKeys = accessedRestrictedVars.keySet(); Set<BSymbol> accessedNonImmutableAndNonIsolatedVars = copyInLockInfo.accessedPotentiallyIsolatedVars; if (!accessedRestrictedVarKeys.isEmpty()) { if (accessedRestrictedVarKeys.size() > 1) { for (BSymbol accessedRestrictedVarKey : accessedRestrictedVarKeys) { for (BLangSimpleVarRef varRef : accessedRestrictedVars.get(accessedRestrictedVarKey)) { dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_USAGE_OF_MULTIPLE_RESTRICTED_VARS_IN_LOCK); } } } for (BLangSimpleVarRef varRef : copyInLockInfo.nonCaptureBindingPatternVarRefsOnLhs) { dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_ASSIGNMENT_IN_LOCK_WITH_RESTRICTED_VAR_USAGE); } for (BLangExpression expr : copyInLockInfo.nonIsolatedTransferInExpressions) { dlog.error(expr.pos, DiagnosticErrorCode.INVALID_TRANSFER_INTO_LOCK_WITH_RESTRICTED_VAR_USAGE); } for (BLangExpression expr : copyInLockInfo.nonIsolatedTransferOutExpressions) { dlog.error(expr.pos, DiagnosticErrorCode.INVALID_TRANSFER_OUT_OF_LOCK_WITH_RESTRICTED_VAR_USAGE); } for (BLangInvocation invocation : copyInLockInfo.nonIsolatedInvocations) { dlog.error(invocation.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_IN_LOCK_WITH_RESTRICTED_VAR_USAGE); } } if (copyInLockInfoStack.empty()) { return; } BLangLock lastCheckedLockNode = lockNode; for (int i = copyInLockInfoStack.size() - 1; i >= 0; i--) { LockInfo prevCopyInLockInfo = copyInLockInfoStack.get(i); BLangLock outerLockNode = prevCopyInLockInfo.lockNode; if (!isEnclosedLockWithinSameFunction(lastCheckedLockNode, outerLockNode)) { return; } lastCheckedLockNode = outerLockNode; Map<BSymbol, List<BLangSimpleVarRef>> prevLockAccessedRestrictedVars = prevCopyInLockInfo.accessedRestrictedVars; for (Map.Entry<BSymbol, List<BLangSimpleVarRef>> entry : accessedRestrictedVars.entrySet()) { BSymbol key = entry.getKey(); if (prevLockAccessedRestrictedVars.containsKey(key)) { prevLockAccessedRestrictedVars.get(key).addAll(entry.getValue()); continue; 
} prevLockAccessedRestrictedVars.put(key, entry.getValue()); } prevCopyInLockInfo.accessedPotentiallyIsolatedVars.addAll(accessedNonImmutableAndNonIsolatedVars); if (!accessedRestrictedVars.isEmpty()) { continue; } prevCopyInLockInfo.nonCaptureBindingPatternVarRefsOnLhs.addAll( copyInLockInfo.nonCaptureBindingPatternVarRefsOnLhs); prevCopyInLockInfo.nonIsolatedTransferInExpressions.addAll(copyInLockInfo.nonIsolatedTransferInExpressions); prevCopyInLockInfo.nonIsolatedTransferOutExpressions.addAll( copyInLockInfo.nonIsolatedTransferOutExpressions); prevCopyInLockInfo.nonIsolatedInvocations.addAll(copyInLockInfo.nonIsolatedInvocations); prevCopyInLockInfo.accessedPotentiallyIsolatedVars.addAll(copyInLockInfo.accessedPotentiallyIsolatedVars); } } @Override public void visit(BLangTransaction transactionNode) { analyzeNode(transactionNode.transactionBody, env); } @Override public void visit(BLangTupleDestructure stmt) { BLangTupleVarRef varRef = stmt.varRef; BLangExpression expr = stmt.expr; analyzeNode(varRef, env); analyzeNode(expr, env); validateTransferOut(varRef, expr); } @Override public void visit(BLangRecordDestructure stmt) { BLangRecordVarRef varRef = stmt.varRef; BLangExpression expr = stmt.expr; analyzeNode(varRef, env); analyzeNode(expr, env); validateTransferOut(varRef, expr); } @Override public void visit(BLangErrorDestructure stmt) { BLangErrorVarRef varRef = stmt.varRef; BLangExpression expr = stmt.expr; analyzeNode(varRef, env); analyzeNode(expr, env); validateTransferOut(varRef, expr); } @Override public void visit(BLangForkJoin forkJoin) { markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; if (isInIsolatedFunction(env.enclInvokable)) { dlog.error(forkJoin.pos, DiagnosticErrorCode.INVALID_FORK_STATEMENT_IN_ISOLATED_FUNCTION); } } @Override public void visit(BLangWorkerSend workerSendNode) { } @Override public void visit(BLangWorkerReceive workerReceiveNode) { } @Override public void visit(BLangRollback rollbackNode) { 
analyzeNode(rollbackNode.expr, env); } @Override public void visit(BLangLiteral literalExpr) { } @Override public void visit(BLangConstRef constRef) { } @Override public void visit(BLangNumericLiteral literalExpr) { } @Override public void visit(BLangRecordLiteral recordLiteral) { for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; if (keyValuePair.key.computedKey) { analyzeNode(keyValuePair.key.expr, env); } analyzeNode(keyValuePair.valueExpr, env); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { analyzeNode((BLangRecordLiteral.BLangRecordVarNameField) field, env); } else { analyzeNode(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env); } } } @Override public void visit(BLangTupleVarRef varRefExpr) { for (BLangExpression expression : varRefExpr.expressions) { analyzeNode(expression, env); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { analyzeNode(restParam, env); } } @Override public void visit(BLangRecordVarRef varRefExpr) { for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { analyzeNode(recordRefField.variableReference, env); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { analyzeNode(restParam, env); } } @Override public void visit(BLangErrorVarRef varRefExpr) { analyzeNode(varRefExpr.message, env); BLangVariableReference cause = varRefExpr.cause; if (cause != null) { analyzeNode(cause, env); } for (BLangNamedArgsExpression namedArgsExpression : varRefExpr.detail) { analyzeNode(namedArgsExpression, env); } BLangVariableReference restVar = varRefExpr.restVar; if (restVar != null) { analyzeNode(restVar, env); } BLangType typeNode = varRefExpr.typeNode; if (typeNode != null) { analyzeNode(typeNode, env); } } @Override public void 
visit(BLangSimpleVarRef varRefExpr) { BType accessType = varRefExpr.getBType(); BSymbol symbol = varRefExpr.symbol; BLangInvokableNode enclInvokable = env.enclInvokable; BLangType enclType = env.enclType; if (symbol == null) { return; } BLangNode parent = varRefExpr.parent; boolean isolatedModuleVariableReference = isIsolatedModuleVariableSymbol(symbol); boolean accessOfPotentiallyIsolatedVariable = false; boolean accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable = false; Set<BSymbol> inferableClasses = new HashSet<>(); if ((symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) { accessOfPotentiallyIsolatedVariable = this.isolationInferenceInfoMap.containsKey(symbol) && this.isolationInferenceInfoMap.get(symbol).getKind() != IsolationInferenceKind.FUNCTION; accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable = Symbols.isFlagOn(symbol.flags, Flags.FINAL) && !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(accessType) && isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(symbol.owner, accessType, inferableClasses); } if (inLockStatement) { LockInfo exprInfo = copyInLockInfoStack.peek(); if (isolatedModuleVariableReference || isMethodCallOnSelfInIsolatedObject(varRefExpr, parent)) { addToAccessedRestrictedVars(exprInfo.accessedRestrictedVars, varRefExpr); } if (parent == null && varRefExpr.isLValue) { if (!isSelfOfObject(varRefExpr) && isInvalidCopyIn(varRefExpr, env)) { exprInfo.nonCaptureBindingPatternVarRefsOnLhs.add(varRefExpr); } } else if ((!varRefExpr.isLValue || parent.getKind() != NodeKind.ASSIGNMENT) && !isIsolated(varRefExpr.symbol.flags) && !isSelfOfIsolatedObject(varRefExpr) && isInvalidCopyIn(varRefExpr, env)) { exprInfo.nonIsolatedTransferInExpressions.add(varRefExpr); } if (accessOfPotentiallyIsolatedVariable) { ((VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(symbol)).accessedLockInfo .add(exprInfo); exprInfo.accessedPotentiallyIsolatedVars.add(symbol); } } else if (accessOfPotentiallyIsolatedVariable || 
accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable) { VariableIsolationInferenceInfo inferenceInfo = (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(symbol); inferenceInfo.accessedOutsideLockStatement = true; inferenceInfo.accessOutsideLockStatementValidIfInferredIsolated = false; if (accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable) { inferenceInfo.dependsOnVariablesAndClasses.addAll(inferableClasses); } } boolean inIsolatedFunction = isInIsolatedFunction(enclInvokable); boolean recordFieldDefaultValue = isRecordFieldDefaultValue(enclType); boolean objectFieldDefaultValueRequiringIsolation = !recordFieldDefaultValue && isObjectFieldDefaultValueRequiringIsolation(env); SymbolEnv enclEnv = env.enclEnv; if (inIsolatedFunction) { if (enclInvokable == null) { BLangArrowFunction bLangArrowFunction = (BLangArrowFunction) enclEnv.node; for (BLangSimpleVariable param : bLangArrowFunction.params) { if (param.symbol == symbol) { return; } } } } if (!recordFieldDefaultValue && !objectFieldDefaultValueRequiringIsolation && enclInvokable != null && isReferenceToVarDefinedInSameInvokable(symbol.owner, enclInvokable.symbol)) { return; } long flags = symbol.flags; if (Symbols.isFlagOn(flags, Flags.CONSTANT)) { return; } if ((Symbols.isFlagOn(flags, Flags.FINAL) || Symbols.isFlagOn(flags, Flags.FUNCTION_FINAL)) && types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(accessType)) { return; } if (isDefinitionReference(symbol)) { return; } if (enclEnv != null && enclEnv.node != null && enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction bLangArrowFunction = (BLangArrowFunction) enclEnv.node; for (BLangSimpleVariable param : bLangArrowFunction.params) { if (param.symbol == symbol) { return; } } } if (isolatedModuleVariableReference) { if (!inLockStatement) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_ISOLATED_VARIABLE_ACCESS_OUTSIDE_LOCK); } return; } if (accessOfPotentiallyIsolatedVariable) { 
markDependentlyIsolatedOnVar(symbol); } else { markDependsOnIsolationNonInferableConstructs(); } inferredIsolated = false; if (inIsolatedFunction) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_ACCESS_IN_ISOLATED_FUNCTION); return; } if (recordFieldDefaultValue) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_ACCESS_AS_RECORD_DEFAULT); return; } if (objectFieldDefaultValueRequiringIsolation) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_ACCESS_AS_OBJECT_DEFAULT); return; } if (isObjectFieldDefaultValue(env)) { BLangFunction initFunction = ((BLangClassDefinition) env.node).initFunction; if (initFunction != null) { markInitMethodDependentlyIsolatedOnVar(initFunction, symbol); } } } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; analyzeNode(expr, env); if (!isInvalidIsolatedObjectFieldOrMethodAccessViaSelfIfOutsideLock(fieldAccessExpr, true)) { BType bType = expr.getBType(); BTypeSymbol tsymbol = bType.tsymbol; BLangIdentifier field = fieldAccessExpr.field; if (!isPotentiallyProtectedFieldAccessedInNonInitMethod(expr, tsymbol, field)) { return; } if (inLockStatement) { LockInfo lockInfo = copyInLockInfoStack.peek(); ((VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol)).accessedLockInfo .add(lockInfo); lockInfo.accessedPotentiallyIsolatedVars.add(tsymbol); return; } VariableIsolationInferenceInfo inferenceInfo = (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol); inferenceInfo.accessedOutsideLockStatement = true; BType fieldType = fieldAccessExpr.getBType(); if (Symbols.isFlagOn(((BObjectType) bType).fields.get(field.value).symbol.flags, Flags.FINAL) && isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(env.enclPkg.symbol, fieldType)) { inferenceInfo.typesOfFinalFieldsAccessedOutsideLock.add(fieldType); } else { inferenceInfo.accessOutsideLockStatementValidIfInferredIsolated = false; } return; } if 
(inLockStatement) { addToAccessedRestrictedVars(copyInLockInfoStack.peek().accessedRestrictedVars, (BLangSimpleVarRef) expr); return; } dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_FIELD_ACCESS_IN_ISOLATED_OBJECT_OUTSIDE_LOCK); } private boolean isPotentiallyProtectedFieldAccessedInNonInitMethod(BLangExpression expr, BTypeSymbol tsymbol, BLangIdentifier field) { return expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && isSelfOfObject((BLangSimpleVarRef) expr) && this.isolationInferenceInfoMap.containsKey(tsymbol) && !inObjectInitMethod() && ((ClassIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol)) .protectedFields.contains(field); } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { analyzeNode(indexAccessExpr.expr, env); analyzeNode(indexAccessExpr.indexExpr, env); } @Override public void visit(BLangInvocation invocationExpr) { analyzeInvocation(invocationExpr); } @Override public void visit(BLangErrorConstructorExpr errorConstructorExpr) { for (BLangExpression positionalArg : errorConstructorExpr.positionalArgs) { analyzeNode(positionalArg, env); } for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) { analyzeNode(namedArgsExpression, env); } } @Override public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) { if (!actionInvocationExpr.async) { analyzeInvocation(actionInvocationExpr); return; } markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; if (actionInvocationExpr.functionPointerInvocation) { return; } if (isInIsolatedFunction(env.enclInvokable)) { dlog.error(actionInvocationExpr.pos, DiagnosticErrorCode.INVALID_ASYNC_INVOCATION_IN_ISOLATED_FUNCTION); } } @Override public void visit(BLangTypeInit typeInitExpr) { BInvokableSymbol initInvocationSymbol = (BInvokableSymbol) typeInitExpr.initInvocation.symbol; if (initInvocationSymbol != null && !isIsolated(initInvocationSymbol.flags)) { 
analyzeFunctionForInference(initInvocationSymbol); inferredIsolated = false; if (isInIsolatedFunction(env.enclInvokable)) { dlog.error(typeInitExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INIT_EXPRESSION_IN_ISOLATED_FUNCTION); } else if (isRecordFieldDefaultValue(env.enclType)) { dlog.error(typeInitExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INIT_EXPRESSION_AS_RECORD_DEFAULT); } else if (isObjectFieldDefaultValueRequiringIsolation(env)) { dlog.error(typeInitExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INIT_EXPRESSION_AS_OBJECT_DEFAULT); } else if (isObjectFieldDefaultValue(env)) { BLangFunction initFunction = ((BLangClassDefinition) env.node).initFunction; if (initFunction != null) { markInitMethodDependentlyIsolatedOnFunction(initFunction, initInvocationSymbol); } } } for (BLangExpression expression : typeInitExpr.argsExpr) { analyzeNode(expression, env); } } @Override public void visit(BLangTernaryExpr ternaryExpr) { analyzeNode(ternaryExpr.expr, env); analyzeNode(ternaryExpr.thenExpr, env); analyzeNode(ternaryExpr.elseExpr, env); } @Override public void visit(BLangWaitExpr waitExpr) { for (BLangExpression expression : waitExpr.exprList) { analyzeNode(expression, env); } } @Override public void visit(BLangTrapExpr trapExpr) { analyzeNode(trapExpr.expr, env); } @Override public void visit(BLangBinaryExpr binaryExpr) { analyzeNode(binaryExpr.lhsExpr, env); analyzeNode(binaryExpr.rhsExpr, env); } @Override public void visit(BLangElvisExpr elvisExpr) { analyzeNode(elvisExpr.lhsExpr, env); analyzeNode(elvisExpr.rhsExpr, env); } @Override public void visit(BLangGroupExpr groupExpr) { analyzeNode(groupExpr.expression, env); } @Override public void visit(BLangLetExpression letExpr) { isolatedLetVarStack.push(new HashSet<>()); for (BLangLetVariable letVarDeclaration : letExpr.letVarDeclarations) { analyzeNode((BLangNode) letVarDeclaration.definitionNode, env); } analyzeNode(letExpr.expr, env); isolatedLetVarStack.pop(); } @Override public void 
// NOTE: this span continues the `@Override public void` emitted at the end of the
// previous source line. The visitor methods below simply delegate to
// analyzeNode(...) for each sub-expression so isolation analysis recurses
// through the AST; none of them contribute isolation constraints directly.

// Analyzes the variable defined by a let-variable declaration.
visit(BLangLetVariable letVariable) {
    analyzeNode((BLangNode) letVariable.definitionNode.getVariable(), env);
}

// Analyzes each member expression of a list constructor.
@Override
public void visit(BLangListConstructorExpr listConstructorExpr) {
    for (BLangExpression expr : listConstructorExpr.exprs) {
        analyzeNode(expr, env);
    }
}

// Analyzes each row (record literal) of a table constructor.
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
        analyzeNode(recordLiteral, env);
    }
}

// Analyzes the operand of a unary expression.
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    analyzeNode(unaryExpr.expr, env);
}

// Analyzes the type node of a typedesc expression.
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    analyzeNode(typedescExpr.typeNode, env);
}

// Analyzes both the target type node and the converted expression of a
// type-conversion expression.
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeNode(conversionExpr.typeNode, env);
    analyzeNode(conversionExpr.expr, env);
}

// XML qualified names carry no sub-expressions to analyze.
@Override
public void visit(BLangXMLQName xmlQName) {
}

// Analyzes an XML attribute's name and value expressions.
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    analyzeNode(xmlAttribute.name, env);
    analyzeNode(xmlAttribute.value, env);
}

// Analyzes children, attributes, and inline namespaces of an XML element literal.
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    for (BLangExpression child : xmlElementLiteral.children) {
        analyzeNode(child, env);
    }
    for (BLangXMLAttribute attribute : xmlElementLiteral.attributes) {
        analyzeNode(attribute, env);
    }
    for (BLangXMLNS inlineNamespace : xmlElementLiteral.inlineNamespaces) {
        analyzeNode(inlineNamespace, env);
    }
}

// Analyzes each item of an XML sequence literal.
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression expr : xmlSequenceLiteral.xmlItems) {
        analyzeNode(expr, env);
    }
}

// Analyzes each interpolated text fragment of an XML text literal.
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    for (BLangExpression expr : xmlTextLiteral.textFragments) {
        analyzeNode(expr, env);
    }
}

// Analyzes each interpolated text fragment of an XML comment literal.
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    for (BLangExpression textFragment : xmlCommentLiteral.textFragments) {
        analyzeNode(textFragment, env);
    }
}

// Analyzes each data fragment of an XML processing-instruction literal.
// (The for-loop header is completed on the next source line.)
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    for (BLangExpression dataFragment :
xmlProcInsLiteral.dataFragments) { analyzeNode(dataFragment, env); } } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { for (BLangExpression textFragment : xmlQuotedString.textFragments) { analyzeNode(textFragment, env); } } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { for (BLangExpression expr : stringTemplateLiteral.exprs) { analyzeNode(expr, env); } } @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { for (BLangExpression insertion : rawTemplateLiteral.insertions) { analyzeNode(insertion, env); } } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { } @Override public void visit(BLangArrowFunction bLangArrowFunction) { SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); createTempSymbolIfNonExistent(bLangArrowFunction); analyzeNode(bLangArrowFunction.body, arrowFunctionEnv); } @Override public void visit(BLangIntRangeExpression intRangeExpression) { analyzeNode(intRangeExpression.startExpr, env); analyzeNode(intRangeExpression.endExpr, env); } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { analyzeNode(bLangVarArgsExpression.expr, env); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { analyzeNode(bLangNamedArgsExpression.expr, env); } @Override public void visit(BLangCheckedExpr checkedExpr) { analyzeNode(checkedExpr.expr, env); } @Override public void visit(BLangDo doNode) { analyzeNode(doNode.body, env); if (doNode.onFailClause != null) { analyzeNode(doNode.onFailClause, env); } } @Override public void visit(BLangFail failExpr) { analyzeNode(failExpr.expr, env); } @Override public void visit(BLangCheckPanickedExpr checkPanickedExpr) { analyzeNode(checkPanickedExpr.expr, env); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { analyzeNode(serviceConstructorExpr.serviceNode, env); } @Override public void visit(BLangTypeTestExpr 
// Continues visit(BLangTypeTestExpr ...) whose signature started on the previous
// source line: analyzes both the tested expression and the target type node.
typeTestExpr) {
    analyzeNode(typeTestExpr.expr, env);
    analyzeNode(typeTestExpr.typeNode, env);
}

// Ignore expressions (`_`) have nothing to analyze.
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
}

// Analyzes the expression an annotation access is performed on.
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    analyzeNode(annotAccessExpr.expr, env);
}

// Analyzes every clause (from/where/select/...) of a query expression.
@Override
public void visit(BLangQueryExpr queryExpr) {
    for (BLangNode clause : queryExpr.getQueryClauses()) {
        analyzeNode(clause, env);
    }
}

// Analyzes each key expression of a multi-key table member access.
@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    for (BLangExpression value : tableMultiKeyExpr.multiKeyIndexExprs) {
        analyzeNode(value, env);
    }
}

// `transactional` expressions carry no sub-expressions.
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
}

// `commit` expressions carry no sub-expressions.
@Override
public void visit(BLangCommitExpr commitExpr) {
}

// Built-in value type nodes have no constituents to analyze.
@Override
public void visit(BLangValueType valueType) {
}

// Analyzes the element type of an array type node.
@Override
public void visit(BLangArrayType arrayType) {
    analyzeNode(arrayType.getElementType(), env);
}

// Built-in reference type nodes have no constituents to analyze.
@Override
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
}

// Analyzes the constraint of a constrained type node (e.g. map<T>, typedesc<T>).
@Override
public void visit(BLangConstrainedType constrainedType) {
    analyzeNode(constrainedType.constraint, env);
}

// Analyzes the value constraint and the error type of a stream type node.
@Override
public void visit(BLangStreamType streamType) {
    analyzeNode(streamType.constraint, env);
    analyzeNode(streamType.error, env);
}

// Analyzes a table type's row constraint and, when present, its key type
// constraint.
@Override
public void visit(BLangTableTypeNode tableType) {
    analyzeNode(tableType.constraint, env);
    if (tableType.tableKeyTypeConstraint != null) {
        analyzeNode(tableType.tableKeyTypeConstraint.keyType, env);
    }
}

// User-defined type references have no sub-nodes to analyze here.
@Override
public void visit(BLangUserDefinedType userDefinedType) {
}

// Analyzes parameter types, the rest-parameter type, and the return type of a
// function type node.
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    for (BLangVariable param : functionTypeNode.params) {
        analyzeNode(param.typeNode, env);
    }
    if (functionTypeNode.restParam != null) {
        analyzeNode(functionTypeNode.restParam.typeNode, env);
    }
    analyzeNode(functionTypeNode.returnTypeNode, env);
}

// Analyzes every member type of a union type node.
@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    for (BLangType memberTypeNode : unionTypeNode.memberTypeNodes) {
        analyzeNode(memberTypeNode, env);
    }
}

// Signature continues on the next source line (BLangIntersectionTypeNode visitor).
@Override
public void
visit(BLangIntersectionTypeNode intersectionTypeNode) { for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) { analyzeNode(constituentTypeNode, env); } } @Override public void visit(BLangObjectTypeNode objectTypeNode) { SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env); for (BLangSimpleVariable field : objectTypeNode.fields) { analyzeNode(field, objectEnv); } for (BLangSimpleVariable referencedField : objectTypeNode.includedFields) { analyzeNode(referencedField, objectEnv); } BLangFunction initFunction = objectTypeNode.initFunction; if (initFunction != null) { analyzeNode(initFunction, objectEnv); } for (BLangFunction function : objectTypeNode.functions) { analyzeNode(function, objectEnv); } } @Override public void visit(BLangClassDefinition classDefinition) { SymbolEnv classEnv = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, env); for (BLangSimpleVariable bLangSimpleVariable : classDefinition.fields) { analyzeNode(bLangSimpleVariable, classEnv); } for (BLangSimpleVariable field : classDefinition.referencedFields) { analyzeNode(field, classEnv); } BLangFunction initFunction = classDefinition.initFunction; if (initFunction != null) { analyzeNode(initFunction, classEnv); } for (BLangFunction function : classDefinition.functions) { analyzeNode(function, classEnv); } } @Override public void visit(BLangObjectConstructorExpression objectConstructorExpression) { visit(objectConstructorExpression.typeInit); } @Override public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) { } @Override public void visit(BLangRecordTypeNode recordTypeNode) { SymbolEnv typeEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env); for (BLangSimpleVariable field : recordTypeNode.fields) { analyzeNode(field, typeEnv); } for (BLangSimpleVariable referencedField : recordTypeNode.includedFields) { analyzeNode(referencedField, typeEnv); } BLangType restFieldType 
= recordTypeNode.restFieldType; if (restFieldType != null) { analyzeNode(restFieldType, typeEnv); } } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { for (BLangExpression expression : finiteTypeNode.valueSpace) { analyzeNode(expression, env); } } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { for (BLangType memberTypeNode : tupleTypeNode.memberTypeNodes) { analyzeNode(memberTypeNode, env); } analyzeNode(tupleTypeNode.restParamType, env); } @Override public void visit(BLangErrorType errorTypeNode) { analyzeNode(errorTypeNode.detailType, env); } @Override public void visit(BLangTupleVariable bLangTupleVariable) { analyzeNode(bLangTupleVariable.typeNode, env); BLangExpression expr = bLangTupleVariable.expr; if (expr != null) { analyzeNode(expr, env); } } @Override public void visit(BLangTupleVariableDef bLangTupleVariableDef) { analyzeNode(bLangTupleVariableDef.var, env); } @Override public void visit(BLangRecordVariable bLangRecordVariable) { analyzeNode(bLangRecordVariable.typeNode, env); BLangExpression expr = bLangRecordVariable.expr; if (expr != null) { analyzeNode(expr, env); } } @Override public void visit(BLangRecordVariableDef bLangRecordVariableDef) { analyzeNode(bLangRecordVariableDef.var, env); } @Override public void visit(BLangErrorVariable bLangErrorVariable) { analyzeNode(bLangErrorVariable.typeNode, env); analyzeNode(bLangErrorVariable.expr, env); for (BLangErrorVariable.BLangErrorDetailEntry bLangErrorDetailEntry : bLangErrorVariable.detail) { analyzeNode(bLangErrorDetailEntry.valueBindingPattern, env); } } @Override public void visit(BLangErrorVariableDef bLangErrorVariableDef) { analyzeNode(bLangErrorVariableDef.errorVariable, env); } @Override public void visit(BLangMatch.BLangMatchStaticBindingPatternClause matchStaticBindingPatternClause) { analyzeNode(matchStaticBindingPatternClause.body, env); } @Override public void visit(BLangMatch.BLangMatchStructuredBindingPatternClause 
matchStmtStructuredBindingPatternClause) { analyzeNode(matchStmtStructuredBindingPatternClause.bindingPatternVariable, env); BLangExpression typeGuardExpr = matchStmtStructuredBindingPatternClause.typeGuardExpr; if (typeGuardExpr != null) { analyzeNode(typeGuardExpr, env); } analyzeNode(matchStmtStructuredBindingPatternClause.body, env); } @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { } @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { } @Override public void visit(BLangWaitForAllExpr waitForAllExpr) { for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : waitForAllExpr.keyValuePairs) { analyzeNode(keyValuePair, env); } } @Override public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) { BLangExpression keyExpr = waitKeyValue.keyExpr; if (keyExpr != null) { analyzeNode(keyExpr, env); } BLangExpression valueExpr = waitKeyValue.valueExpr; if (valueExpr != null) { analyzeNode(valueExpr, env); } } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { BLangExpression childIndex = xmlNavigation.childIndex; if (childIndex != null) { analyzeNode(childIndex, env); } } private void analyzeInvocation(BLangInvocation invocationExpr) { List<BLangExpression> requiredArgs = invocationExpr.requiredArgs; List<BLangExpression> restArgs = invocationExpr.restArgs; BLangExpression expr = invocationExpr.expr; if (expr != null && (requiredArgs.isEmpty() || requiredArgs.get(0) != expr)) { analyzeNode(expr, env); } BInvokableSymbol symbol = (BInvokableSymbol) invocationExpr.symbol; if (symbol == null) { analyzeArgs(requiredArgs, restArgs); return; } boolean inIsolatedFunction = isInIsolatedFunction(env.enclInvokable); boolean recordFieldDefaultValue = isRecordFieldDefaultValue(env.enclType); boolean objectFieldDefaultValueRequiringIsolation = isObjectFieldDefaultValueRequiringIsolation(env); boolean expectsIsolation = inIsolatedFunction || 
recordFieldDefaultValue || objectFieldDefaultValueRequiringIsolation; boolean isolatedFunctionCall = isIsolated(symbol.type.flags); if (isolatedFunctionCall) { analyzeArgIsolatedness(invocationExpr, requiredArgs, restArgs, symbol, expectsIsolation); return; } analyzeArgs(requiredArgs, restArgs); if (inLockStatement) { copyInLockInfoStack.peek().nonIsolatedInvocations.add(invocationExpr); } long flags = symbol.flags; if (Symbols.isFlagOn(flags, Flags.ISOLATED_PARAM)) { return; } analyzeFunctionForInference(symbol); inferredIsolated = false; if (inIsolatedFunction) { dlog.error(invocationExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_IN_ISOLATED_FUNCTION); return; } if (recordFieldDefaultValue) { dlog.error(invocationExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_AS_RECORD_DEFAULT); } if (objectFieldDefaultValueRequiringIsolation) { dlog.error(invocationExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_AS_OBJECT_DEFAULT); } else if (isObjectFieldDefaultValue(env)) { BLangFunction initFunction = ((BLangClassDefinition) env.node).initFunction; if (initFunction != null) { markInitMethodDependentlyIsolatedOnFunction(initFunction, symbol); } } } private void analyzeArgs(List<BLangExpression> requiredArgs, List<BLangExpression> restArgs) { List<BLangExpression> args = new ArrayList<>(requiredArgs); args.addAll(restArgs); for (BLangExpression argExpr : args) { analyzeNode(argExpr, env); } } private void analyzeAndSetArrowFuncFlagForIsolatedParamArg(BLangExpression arg) { if (arg.getKind() == NodeKind.REST_ARGS_EXPR) { BLangExpression expr = ((BLangRestArgsExpression) arg).expr; if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_EXPR) { analyzeNode(arg, env); return; } for (BLangExpression expression : ((BLangListConstructorExpr) expr).exprs) { analyzeAndSetArrowFuncFlagForIsolatedParamArg(expression); } return; } boolean namedArg = arg.getKind() == NodeKind.NAMED_ARGS_EXPR; BLangExpression argExpr = namedArg ? 
((BLangNamedArgsExpression) arg).expr : arg; if (argExpr.getKind() != NodeKind.ARROW_EXPR) { analyzeNode(argExpr, env); return; } boolean prevInferredIsolatedness = this.inferredIsolated; this.inferredIsolated = true; analyzeNode(argExpr, env); if (this.inferredIsolated) { BInvokableType invokableType = (BInvokableType) argExpr.getBType(); BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol; BInvokableTypeSymbol dupInvokableTypeSymbol = new BInvokableTypeSymbol(tsymbol.tag, tsymbol.flags | Flags.ISOLATED, tsymbol.pkgID, null, tsymbol.owner, tsymbol.pos, tsymbol.origin); dupInvokableTypeSymbol.params = tsymbol.params == null ? null : new ArrayList<>(tsymbol.params); BInvokableType dupInvokableType = new BInvokableType(invokableType.paramTypes, invokableType.restType, invokableType.retType, dupInvokableTypeSymbol); dupInvokableType.flags |= Flags.ISOLATED; dupInvokableTypeSymbol.type = dupInvokableType; argExpr.setBType(dupInvokableType); if (namedArg) { arg.setBType(dupInvokableType); } } this.inferredIsolated = prevInferredIsolatedness && this.inferredIsolated; } private void analyzeArgIsolatedness(BLangInvocation invocationExpr, List<BLangExpression> requiredArgs, List<BLangExpression> restArgs, BInvokableSymbol symbol, boolean expectsIsolation) { List<BVarSymbol> params = symbol.params; int paramsCount = params.size(); if (restArgs.isEmpty()) { int nextParamIndex = 0; for (BLangExpression arg : requiredArgs) { if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { BVarSymbol varSymbol = params.get(nextParamIndex++); if (!Symbols.isFlagOn(varSymbol.flags, Flags.ISOLATED_PARAM)) { analyzeNode(arg, env); continue; } analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg); handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(), arg.pos); continue; } String name = ((BLangNamedArgsExpression) arg).name.value; for (BVarSymbol param : params) { if (!param.name.value.equals(name)) { continue; } if 
(!Symbols.isFlagOn(param.flags, Flags.ISOLATED_PARAM)) { analyzeNode(arg, env); continue; } analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg); handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(), arg.pos); } } return; } int reqArgCount = requiredArgs.size(); for (int i = 0; i < reqArgCount; i++) { BLangExpression arg = requiredArgs.get(i); if (!Symbols.isFlagOn(params.get(i).flags, Flags.ISOLATED_PARAM)) { analyzeNode(arg, env); continue; } if (arg.getBType() == symTable.semanticError) { continue; } analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg); handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(), arg.pos); } if (restArgs.get(restArgs.size() - 1).getKind() == NodeKind.REST_ARGS_EXPR) { BLangRestArgsExpression varArg = (BLangRestArgsExpression) restArgs.get(restArgs.size() - 1); BType varArgType = varArg.getBType(); Location varArgPos = varArg.pos; if (varArgType == symTable.semanticError) { return; } if (reqArgCount == paramsCount) { if (!Symbols.isFlagOn(symbol.restParam.flags, Flags.ISOLATED_PARAM)) { analyzeNode(varArg, env); return; } analyzeAndSetArrowFuncFlagForIsolatedParamArg(varArg); analyzeVarArgIsolatedness(invocationExpr, varArg, varArgPos, expectsIsolation); return; } if (reqArgCount < paramsCount) { BTupleType tupleType = (BTupleType) varArgType; List<BType> memberTypes = tupleType.tupleTypes; BLangExpression varArgExpr = varArg.expr; boolean listConstrVarArg = varArgExpr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR; BLangListConstructorExpr listConstructorExpr = listConstrVarArg ? 
(BLangListConstructorExpr) varArgExpr : null; if (!listConstrVarArg) { analyzeNode(varArg, env); } int tupleIndex = 0; for (int i = reqArgCount; i < paramsCount; i++) { if (!Symbols.isFlagOn(params.get(i).flags, Flags.ISOLATED_PARAM)) { if (listConstrVarArg) { analyzeNode(listConstructorExpr.exprs.get(tupleIndex), env); } tupleIndex++; continue; } BType type = memberTypes.get(tupleIndex); BLangExpression arg = null; if (listConstrVarArg) { arg = listConstructorExpr.exprs.get(tupleIndex); analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg); type = arg.getBType(); } handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, type, varArgPos); tupleIndex++; } BVarSymbol restParam = symbol.restParam; if (restParam == null) { return; } if (!Symbols.isFlagOn(restParam.flags, Flags.ISOLATED_PARAM)) { if (listConstructorExpr == null) { return; } List<BLangExpression> exprs = listConstructorExpr.exprs; for (int i = tupleIndex; i < exprs.size(); i++) { analyzeNode(exprs.get(i), env); } return; } int memberTypeCount = memberTypes.size(); if (tupleIndex < memberTypeCount) { for (int i = tupleIndex; i < memberTypeCount; i++) { BType type = memberTypes.get(i); BLangExpression arg = null; if (listConstrVarArg) { arg = listConstructorExpr.exprs.get(i); analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg); type = arg.getBType(); } handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, type, varArgPos); } } if (listConstrVarArg) { List<BLangExpression> exprs = listConstructorExpr.exprs; for (int i = tupleIndex; i < exprs.size(); i++) { BLangExpression arg = exprs.get(i); analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg); handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(), varArgPos); } return; } BType tupleRestType = tupleType.restType; if (tupleRestType == null) { return; } handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, tupleRestType, 
varArgPos); return; } } if (!Symbols.isFlagOn(symbol.restParam.flags, Flags.ISOLATED_PARAM)) { for (BLangExpression restArg : restArgs) { analyzeNode(restArg, env); } return; } analyzeRestArgsForRestParam(invocationExpr, restArgs, symbol, expectsIsolation); } private void analyzeRestArgsForRestParam(BLangInvocation invocationExpr, List<BLangExpression> restArgs, BInvokableSymbol symbol, boolean expectsIsolation) { if (Symbols.isFlagOn(((BArrayType) symbol.restParam.type).eType.flags, Flags.ISOLATED)) { for (BLangExpression restArg : restArgs) { analyzeNode(restArg, env); } return; } for (BLangExpression restArg : restArgs) { analyzeAndSetArrowFuncFlagForIsolatedParamArg(restArg); } int size = restArgs.size(); BLangExpression lastArg = restArgs.get(size - 1); boolean lastArgIsVarArg = lastArg.getKind() == NodeKind.REST_ARGS_EXPR; for (int i = 0; i < (lastArgIsVarArg ? size - 1 : size); i++) { BLangExpression arg = restArgs.get(i); handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(), arg.pos); } if (lastArgIsVarArg) { analyzeVarArgIsolatedness(invocationExpr, (BLangRestArgsExpression) lastArg, lastArg.pos, expectsIsolation); } } private void analyzeVarArgIsolatedness(BLangInvocation invocationExpr, BLangRestArgsExpression restArgsExpression, Location pos, boolean expectsIsolation) { BLangExpression expr = restArgsExpression.expr; if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) { for (BLangExpression expression : ((BLangListConstructorExpr) expr).exprs) { analyzeAndSetArrowFuncFlagForIsolatedParamArg(expression); handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, expression, expectsIsolation, expression.getBType(), pos); } return; } BType varArgType = restArgsExpression.getBType(); if (varArgType.tag == TypeTags.ARRAY) { handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, ((BArrayType) varArgType).eType, pos); return; } BTupleType tupleType = (BTupleType) varArgType; 
// Continuation of analyzeVarArgIsolatedness(...): for a tuple-typed vararg,
// every member type (and the tuple rest type, if any) must satisfy the
// isolation expectation of the corresponding `isolated` parameter.
for (BType type : tupleType.tupleTypes) {
    handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, type, pos);
}
BType restType = tupleType.restType;
if (restType != null) {
    handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, restType, pos);
}
}

// Handles an argument passed for an `isolated` function parameter when the
// argument's type is not itself flagged ISOLATED. In a context that requires
// isolation this is an error; otherwise the enclosing function's inferred
// isolatedness is either made dependent on the argument's function (lambda or
// arrow expression) or marked non-inferable.
private void handleNonExplicitlyIsolatedArgForIsolatedParam(BLangInvocation invocationExpr, BLangExpression expr, boolean expectsIsolation, BType type, Location pos) {
    if (Symbols.isFlagOn(type.flags, Flags.ISOLATED)) {
        // Explicitly isolated function type: nothing further to check.
        return;
    }
    this.inferredIsolated = false;
    if (expectsIsolation) {
        dlog.error(pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_FUNCTION_AS_ARGUMENT);
        return;
    }
    if (expr != null) {
        NodeKind kind = expr.getKind();
        if (kind == NodeKind.LAMBDA) {
            markFunctionDependentlyIsolatedOnFunction(env.enclInvokable, ((BLangLambdaFunction) expr).function.symbol);
        } else if (kind == NodeKind.ARROW_EXPR) {
            markFunctionDependentlyIsolatedOnFunction(env.enclInvokable, createTempSymbolIfNonExistent((BLangArrowFunction) expr));
        }
    } else {
        // No concrete expression available (only a type, e.g. from a vararg),
        // so isolation cannot be inferred for the enclosing construct.
        markDependsOnIsolationNonInferableConstructs();
    }
    if (inLockStatement) {
        copyInLockInfoStack.peek().nonIsolatedInvocations.add(invocationExpr);
    }
}

// True if the enclosing invokable is declared `isolated`; with no enclosing
// invokable, falls back to the enclosing arrow function's type flags.
private boolean isInIsolatedFunction(BLangInvokableNode enclInvokable) {
    if (enclInvokable == null) {
        if (isNotInArrowFunctionBody(env)) {
            return false;
        }
        return isIsolated(((BLangArrowFunction) env.enclEnv.node).funcType.flags);
    }
    return isIsolated(enclInvokable.symbol.flags);
}

// True if the current expression occurs as a record field default value
// (i.e. the enclosing type node is a record type).
private boolean isRecordFieldDefaultValue(BLangType enclType) {
    if (enclType == null) {
        return false;
    }
    return enclType.getKind() == NodeKind.RECORD_TYPE;
}

// True if the current expression is an object field default that must itself be
// isolated: either the class has no `init` method, or its `init` is isolated.
private boolean isObjectFieldDefaultValueRequiringIsolation(SymbolEnv env) {
    if (!isObjectFieldDefaultValue(env)) {
        return false;
    }
    BLangClassDefinition classDefinition = (BLangClassDefinition) env.node;
    BLangFunction initFunction = classDefinition.initFunction;
    if (initFunction == null) {
        return true;
    }
    return isIsolated(initFunction.symbol.flags);
}

// True if the current env node is a class definition, i.e. the expression being
// analyzed is an object field default value.
private boolean isObjectFieldDefaultValue(SymbolEnv env) {
    return env.node.getKind() == NodeKind.CLASS_DEFN;
}

// True if the symbol refers to a type definition or a function definition.
private boolean isDefinitionReference(BSymbol symbol) {
    return Symbols.isTagOn(symbol, SymTag.TYPE_DEF) || Symbols.isTagOn(symbol, SymTag.FUNCTION);
}

// True if the given flag set carries the ISOLATED flag.
private boolean isIsolated(long flags) {
    return Symbols.isFlagOn(flags, Flags.ISOLATED);
}

// True if analysis is currently inside a class definition declared `isolated`.
private boolean isIsolatedClassField() {
    BLangNode node = env.node;
    return node.getKind() == NodeKind.CLASS_DEFN && ((BLangClassDefinition) node).flagSet.contains(Flag.ISOLATED);
}

// A field of an isolated object is expected to be protected (private/lock-guarded)
// unless it is `final` and its type is a subtype of readonly|isolated object.
private boolean isExpectedToBeAPrivateField(BVarSymbol symbol, BType type) {
    return !Symbols.isFlagOn(symbol.flags, Flags.FINAL) || !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type);
}

// True if the field/method access is of the form `self.x` inside an isolated
// object's method (the `init` method is skipped when `ignoreInit` is set —
// TODO confirm against isInIsolatedObjectMethod).
private boolean isIsolatedObjectFieldOrMethodAccessViaSelf(BLangFieldBasedAccess fieldAccessExpr, boolean ignoreInit) {
    BLangExpression expr = fieldAccessExpr.expr;
    if (expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    if (!isSelfOfObject((BLangSimpleVarRef) expr)) {
        return false;
    }
    return isInIsolatedObjectMethod(env, ignoreInit);
}

// True if a `self.x` access inside an isolated object refers to a field that is
// expected to be protected, making the access invalid outside a lock statement.
private boolean isInvalidIsolatedObjectFieldOrMethodAccessViaSelfIfOutsideLock( BLangFieldBasedAccess fieldAccessExpr, boolean ignoreInit) {
    if (!isIsolatedObjectFieldOrMethodAccessViaSelf(fieldAccessExpr, ignoreInit)) {
        return false;
    }
    BField field = ((BObjectType) env.enclInvokable.symbol.owner.type).fields.get(fieldAccessExpr.field.value);
    if (field == null) {
        // Not a field access (e.g. a method access via self): nothing to flag.
        return false;
    }
    return isExpectedToBeAPrivateField(field.symbol, field.type);
}

// Validates that the expression is isolated, unless its static type is already a
// subtype of readonly|isolated object (in which case no check is needed).
private void validateIsolatedExpression(BType type, BLangExpression expression) {
    if (types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type)) {
        return;
    }
    validateIsolatedExpression(expression);
}

// Validates the expression, logging an error for each non-isolated
// sub-expression (logErrors = true, visitRestOnError = true).
private void validateIsolatedExpression(BLangExpression expression) {
    isIsolatedExpression(expression, true, true, new ArrayList<>());
}

// Checks isolated-ness without logging errors and without visiting the rest of
// the expression once a non-isolated part is found.
private boolean isIsolatedExpression(BLangExpression expression) {
    return isIsolatedExpression(expression, false, false, new ArrayList<>());
}

// Overload signature continues on the next source line.
private
boolean isIsolatedExpression(BLangExpression expression, boolean logErrors, boolean visitRestOnError, List<BLangExpression> nonIsolatedLocations) { return isIsolatedExpression(expression, logErrors, visitRestOnError, nonIsolatedLocations, false, null, null, null); } private boolean isIsolatedExpression(BLangExpression expression, boolean logErrors, boolean visitRestOnError, List<BLangExpression> nonIsolatedExpressions, boolean inferring, Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, Set<BSymbol> unresolvedSymbols) { BType type = expression.getBType(); if (type != null && isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, inferring, type, unresolvedSymbols)) { return true; } switch (expression.getKind()) { case SIMPLE_VARIABLE_REF: if (isReferenceOfLetVarInitializedWithAnIsolatedExpression((BLangSimpleVarRef) expression)) { return true; } break; case LITERAL: case NUMERIC_LITERAL: return true; case LIST_CONSTRUCTOR_EXPR: for (BLangExpression expr : ((BLangListConstructorExpr) expression).exprs) { if (isIsolatedExpression(expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case TABLE_CONSTRUCTOR_EXPR: for (BLangRecordLiteral mappingConstr : ((BLangTableConstructorExpr) expression).recordLiteralList) { if (isIsolatedExpression(mappingConstr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case RECORD_LITERAL_EXPR: for (RecordLiteralNode.RecordField field : ((BLangRecordLiteral) expression).fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValueField = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangRecordLiteral.BLangRecordKey key = keyValueField.key; if (key.computedKey) { if (!isIsolatedExpression(key.expr, logErrors, visitRestOnError, nonIsolatedExpressions) && 
!logErrors && !visitRestOnError) { return false; } } if (isIsolatedExpression(keyValueField.valueExpr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { if (isIsolatedExpression(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (isIsolatedExpression((BLangRecordLiteral.BLangRecordVarNameField) field, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case XML_COMMENT_LITERAL: BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) expression; for (BLangExpression textFragment : commentLiteral.textFragments) { if (isIsolatedExpression(textFragment, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } BLangExpression commentLiteralConcatExpr = commentLiteral.concatExpr; if (commentLiteralConcatExpr == null) { return true; } return isIsolatedExpression(commentLiteralConcatExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case XML_TEXT_LITERAL: BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) expression; for (BLangExpression textFragment : textLiteral.textFragments) { if (isIsolatedExpression(textFragment, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } BLangExpression textLiteralConcatExpr = textLiteral.concatExpr; if (textLiteralConcatExpr == null) { return true; } return isIsolatedExpression(textLiteralConcatExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case XML_PI_LITERAL: BLangXMLProcInsLiteral procInsLiteral = (BLangXMLProcInsLiteral) expression; for (BLangExpression dataFragment : procInsLiteral.dataFragments) { if (isIsolatedExpression(dataFragment, 
logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } BLangExpression procInsLiteralConcatExpr = procInsLiteral.dataConcatExpr; if (procInsLiteralConcatExpr == null) { return true; } return isIsolatedExpression(procInsLiteralConcatExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case XML_ELEMENT_LITERAL: for (BLangExpression child : ((BLangXMLElementLiteral) expression).children) { if (isIsolatedExpression(child, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case XML_SEQUENCE_LITERAL: for (BLangExpression xmlItem : ((BLangXMLSequenceLiteral) expression).xmlItems) { if (isIsolatedExpression(xmlItem, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case RAW_TEMPLATE_LITERAL: for (BLangExpression insertion : ((BLangRawTemplateLiteral) expression).insertions) { if (isIsolatedExpression(insertion, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case STRING_TEMPLATE_LITERAL: for (BLangExpression expr : ((BLangStringTemplateLiteral) expression).exprs) { if (isIsolatedExpression(expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case TYPE_CONVERSION_EXPR: return isIsolatedExpression(((BLangTypeConversionExpr) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case CHECK_EXPR: case CHECK_PANIC_EXPR: return isIsolatedExpression(((BLangCheckedExpr) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case TRAP_EXPR: return isIsolatedExpression(((BLangTrapExpr) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case TERNARY_EXPR: BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) expression; if 
(!isIsolatedExpression(ternaryExpr.expr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError) { return false; } if (!isIsolatedExpression(ternaryExpr.thenExpr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError) { return false; } return isIsolatedExpression(ternaryExpr.elseExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case ELVIS_EXPR: BLangElvisExpr elvisExpr = (BLangElvisExpr) expression; if (!isIsolatedExpression(elvisExpr.lhsExpr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError) { return false; } return isIsolatedExpression(elvisExpr.rhsExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case LET_EXPR: return isIsolatedExpression(((BLangLetExpression) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case GROUP_EXPR: return isIsolatedExpression(((BLangGroupExpr) expression).expression, logErrors, visitRestOnError, nonIsolatedExpressions); case TYPE_INIT_EXPR: BLangTypeInit typeInitExpr = (BLangTypeInit) expression; if (typeInitExpr == null) { return true; } expression = typeInitExpr.initInvocation; break; case OBJECT_CTOR_EXPRESSION: var objectConstructorExpression = (BLangObjectConstructorExpression) expression; typeInitExpr = objectConstructorExpression.typeInit; if (typeInitExpr == null) { return true; } expression = typeInitExpr.initInvocation; break; } if (expression.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expression; if (isCloneOrCloneReadOnlyInvocation(invocation)) { return true; } BSymbol invocationSymbol = invocation.symbol; if (invocationSymbol == null) { List<BLangExpression> argExprs = invocation.argExprs; if (argExprs.isEmpty()) { return true; } return isIsolatedExpression(argExprs.get(0), logErrors, visitRestOnError, nonIsolatedExpressions); } else if (isIsolated(invocationSymbol.type.flags) || (inferring && 
this.isolationInferenceInfoMap.containsKey(invocationSymbol) && inferFunctionIsolation(invocationSymbol, this.isolationInferenceInfoMap.get(invocationSymbol), publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols))) { List<BLangExpression> requiredArgs = invocation.requiredArgs; BLangExpression calledOnExpr = invocation.expr; if (calledOnExpr != null && (requiredArgs.isEmpty() || calledOnExpr != requiredArgs.get(0)) && (!isIsolatedExpression(calledOnExpr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError)) { return false; } for (BLangExpression requiredArg : requiredArgs) { if (requiredArg.getKind() == NodeKind.NAMED_ARGS_EXPR) { if (isIsolatedExpression(((BLangNamedArgsExpression) requiredArg).expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (isIsolatedExpression(requiredArg, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } for (BLangExpression restArg : invocation.restArgs) { if (restArg.getKind() == NodeKind.REST_ARGS_EXPR) { if (isIsolatedExpression(((BLangRestArgsExpression) restArg).expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (isIsolatedExpression(restArg, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; } } if (logErrors) { dlog.error(expression.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_EXPRESSION_AS_INITIAL_VALUE); } else { nonIsolatedExpressions.add(expression); } return false; } private boolean isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, boolean inferring, BType type, Set<BSymbol> unresolvedSymbols) { if (types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type)) { return true; } if (!inferring) { return false; } 
BTypeSymbol tsymbol = type.tsymbol; int tag = type.tag; if (tag == TypeTags.OBJECT) { if (this.isolationInferenceInfoMap.containsKey(tsymbol)) { return inferVariableOrClassIsolation(publiclyExposedObjectTypes, classDefinitions, tsymbol, (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol), true, unresolvedSymbols); } return false; } if (tag != TypeTags.UNION) { return false; } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, true, memberType, unresolvedSymbols)) { return false; } } return true; } private boolean isDependentlyIsolatedExpressionKind(BLangExpression expression) { switch (expression.getKind()) { case LIST_CONSTRUCTOR_EXPR: case TABLE_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: case XML_COMMENT_LITERAL: case XML_TEXT_LITERAL: case XML_PI_LITERAL: case XML_ELEMENT_LITERAL: case XML_SEQUENCE_LITERAL: case RAW_TEMPLATE_LITERAL: case STRING_TEMPLATE_LITERAL: case TYPE_CONVERSION_EXPR: case CHECK_EXPR: case CHECK_PANIC_EXPR: case TRAP_EXPR: case TERNARY_EXPR: case ELVIS_EXPR: return true; case GROUP_EXPR: return isDependentlyIsolatedExpressionKind(((BLangGroupExpr) expression).expression); } return false; } private boolean isCloneOrCloneReadOnlyInvocation(BLangInvocation invocation) { if (!invocation.langLibInvocation) { return false; } String methodName = invocation.symbol.name.value; return invocation.symbol.pkgID.name.value.equals(VALUE_LANG_LIB) && (methodName.equals(CLONE_LANG_LIB_METHOD) || methodName.equals(CLONE_READONLY_LANG_LIB_METHOD)); } private boolean isInvalidTransferIn(BLangSimpleVarRef expression) { return isInvalidTransferIn(expression, isSelfOfObject(expression)); } private boolean isInvalidTransferIn(BLangExpression expression, boolean invokedOnSelf) { BLangNode parent = expression.parent; NodeKind parentExprKind = parent.getKind(); if (!(parent instanceof BLangExpression)) { return 
!isIsolatedExpression(expression); } BLangExpression parentExpression = (BLangExpression) parent; if (parentExprKind != NodeKind.INVOCATION) { if (!isSelfReference(expression) && isIsolatedExpression(expression)) { return false; } return isInvalidTransferIn(parentExpression, invokedOnSelf); } BLangInvocation invocation = (BLangInvocation) parentExpression; BLangExpression calledOnExpr = invocation.expr; if (calledOnExpr == expression) { if (isIsolatedExpression(expression)) { return false; } if (isCloneOrCloneReadOnlyInvocation(invocation)) { return false; } if (!invokedOnSelf && invocation.getBType().tag == TypeTags.NIL) { return true; } return isInvalidTransferIn(parentExpression, invokedOnSelf); } return !isIsolatedExpression(expression); } private void validateTransferOut(BLangExpression expression, List<BLangExpression> nonIsolatedCopyOutExpressions) { if (!isDependentlyIsolatedExpressionKind(expression)) { if (!isIsolatedExpression(expression)) { nonIsolatedCopyOutExpressions.add(expression); } return; } isIsolatedExpression(expression, false, true, nonIsolatedCopyOutExpressions); } private void validateTransferOutViaAssignment(BLangExpression expression, BLangExpression varRef, List<BLangExpression> nonIsolatedCopyOutLocations) { if (!hasRefDefinedOutsideLock(varRef)) { return; } validateTransferOut(expression, nonIsolatedCopyOutLocations); } private void validateTransferOut(BLangExpression varRef, BLangExpression expr) { if (!this.inLockStatement) { return; } validateTransferOutViaAssignment(expr, varRef, this.copyInLockInfoStack.peek().nonIsolatedTransferOutExpressions); } private boolean isSelfReference(BLangExpression expression) { return expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF && isSelfOfObject((BLangSimpleVarRef) expression); } private boolean isSelfOfObject(BLangSimpleVarRef varRefExpr) { if (!Names.SELF.value.equals(varRefExpr.variableName.value)) { return false; } BSymbol symbol = varRefExpr.symbol; if (symbol == null) { return false; } 
        // Continuation of isSelfOfObject(): `self` is recognized only when its owner is an
        // invokable and the symbol is that invokable's receiver symbol.
        BSymbol owner = symbol.owner;
        if (owner == null || ((owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE)) {
            return false;
        }
        return symbol == ((BInvokableSymbol) owner).receiverSymbol;
    }

    // True if the var-ref is `self` and the type of `self` carries the `isolated` flag.
    private boolean isSelfOfIsolatedObject(BLangSimpleVarRef varRefExpr) {
        return isSelfOfObject(varRefExpr) && isIsolated(varRefExpr.symbol.type.flags);
    }

    // Recursively checks whether the given variable reference (simple, record, tuple, or error
    // binding-pattern ref) refers to any variable defined outside the enclosing lock statement
    // (the per-name check is isDefinedOutsideLock).
    private boolean hasRefDefinedOutsideLock(BLangExpression variableReference) {
        switch (variableReference.getKind()) {
            case SIMPLE_VARIABLE_REF:
                BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) variableReference;
                return isDefinedOutsideLock(names.fromIdNode(simpleVarRef.variableName),
                                            simpleVarRef.symbol.tag, env);
            case RECORD_VARIABLE_REF:
                // Check every field ref, then the rest binding pattern if present.
                BLangRecordVarRef recordVarRef = (BLangRecordVarRef) variableReference;
                for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : recordVarRef.recordRefFields) {
                    if (hasRefDefinedOutsideLock(recordRefField.variableReference)) {
                        return true;
                    }
                }
                ExpressionNode recordRestParam = recordVarRef.restParam;
                return recordRestParam != null && hasRefDefinedOutsideLock((BLangExpression) recordRestParam);
            case TUPLE_VARIABLE_REF:
                BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) variableReference;
                for (BLangExpression expression : tupleVarRef.expressions) {
                    if (hasRefDefinedOutsideLock(expression)) {
                        return true;
                    }
                }
                ExpressionNode tupleRestParam = tupleVarRef.restParam;
                return tupleRestParam != null && hasRefDefinedOutsideLock((BLangExpression) tupleRestParam);
            case ERROR_VARIABLE_REF:
                // Error refs are checked member-wise: message, cause, named details, then rest.
                BLangErrorVarRef errorVarRef = (BLangErrorVarRef) variableReference;
                BLangVariableReference message = errorVarRef.message;
                if (message != null && hasRefDefinedOutsideLock(message)) {
                    return true;
                }
                BLangVariableReference cause = errorVarRef.cause;
                if (cause != null && hasRefDefinedOutsideLock(cause)) {
                    return true;
                }
                for (BLangNamedArgsExpression namedArgsExpression : errorVarRef.detail) {
                    if (hasRefDefinedOutsideLock(namedArgsExpression.expr)) {
                        return true;
                    }
                }
                BLangVariableReference errorRestVar =
                        errorVarRef.restVar;
                return errorRestVar != null && hasRefDefinedOutsideLock(errorRestVar);
        }
        return false;
    }

    // Walks outward from `currentEnv`: if the name resolves before a LOCK env is met, it was
    // defined inside the lock (not "outside"); reaching a LOCK env without resolving it means the
    // definition lies outside the lock. The ignored name `_` never counts.
    private boolean isDefinedOutsideLock(Name name, int symTag, SymbolEnv currentEnv) {
        if (Names.IGNORE == name ||
                symResolver.lookupSymbolInGivenScope(currentEnv, name, symTag) != symTable.notFoundSymbol) {
            return false;
        }
        if (currentEnv.node.getKind() == NodeKind.LOCK) {
            return true;
        }
        return isDefinedOutsideLock(name, symTag, currentEnv.enclEnv);
    }

    // True if `env` is within an attached function/resource of an `isolated` object.
    // When `ignoreInit` is set, the object's init method does not count.
    private boolean isInIsolatedObjectMethod(SymbolEnv env, boolean ignoreInit) {
        BLangInvokableNode enclInvokable = env.enclInvokable;
        if (enclInvokable == null || (enclInvokable.getKind() != NodeKind.FUNCTION &&
                enclInvokable.getKind() != NodeKind.RESOURCE_FUNC)) {
            return false;
        }
        BLangFunction enclFunction = (BLangFunction) enclInvokable;
        if (!enclFunction.attachedFunction) {
            return false;
        }
        if (enclFunction.objInitFunction && ignoreInit) {
            return false;
        }
        BType ownerType = enclInvokable.symbol.owner.type;
        return ownerType.tag == TypeTags.OBJECT && isIsolated(ownerType.flags);
    }

    private boolean isInvalidCopyIn(BLangSimpleVarRef varRefExpr, SymbolEnv currentEnv) {
        return isInvalidCopyIn(varRefExpr, names.fromIdNode(varRefExpr.variableName),
                               varRefExpr.symbol.tag, currentEnv);
    }

    // A copy-in is invalid when the name does not resolve inside the lock (lookup keeps failing
    // until the LOCK env is reached) and the transfer-in is itself invalid. Symbols that resolve
    // to a closure copy (non-null `originalSymbol`) keep being resolved outward.
    private boolean isInvalidCopyIn(BLangSimpleVarRef varRefExpr, Name name, int symTag, SymbolEnv currentEnv) {
        BSymbol symbol = symResolver.lookupSymbolInGivenScope(currentEnv, name, symTag);
        if (symbol != symTable.notFoundSymbol &&
                (!(symbol instanceof BVarSymbol) || ((BVarSymbol) symbol).originalSymbol == null)) {
            return false;
        }
        if (currentEnv.node.getKind() == NodeKind.LOCK) {
            if (varRefExpr.parent == null) {
                return true;
            }
            return isInvalidTransferIn(varRefExpr);
        }
        return isInvalidCopyIn(varRefExpr, name, symTag, currentEnv.enclEnv);
    }

    // True when `self` of an isolated object is used for something other than being the subject
    // of a field-based access expression.
    private boolean isMethodCallOnSelfInIsolatedObject(BLangSimpleVarRef varRefExpr, BLangNode parent) {
        return isSelfVarInIsolatedObject(varRefExpr) && parent != null && parent.getKind() !=
NodeKind.FIELD_BASED_ACCESS_EXPR; } private boolean isSelfVarInIsolatedObject(BLangSimpleVarRef varRefExpr) { return isInIsolatedObjectMethod(env, true) && isSelfOfObject(varRefExpr); } private boolean isIsolatedModuleVariableSymbol(BSymbol symbol) { return symbol.owner.getKind() == SymbolKind.PACKAGE && isIsolated(symbol.flags); } private BSymbol getOriginalSymbol(BSymbol symbol) { if (!(symbol instanceof BVarSymbol)) { return symbol; } BVarSymbol varSymbol = (BVarSymbol) symbol; BVarSymbol originalSymbol = varSymbol.originalSymbol; return originalSymbol == null ? varSymbol : getOriginalSymbol(originalSymbol); } private void addToAccessedRestrictedVars(Map<BSymbol, List<BLangSimpleVarRef>> accessedRestrictedVars, BLangSimpleVarRef varRef) { BSymbol originalSymbol = getOriginalSymbol(varRef.symbol); if (accessedRestrictedVars.containsKey(originalSymbol)) { accessedRestrictedVars.get(originalSymbol).add(varRef); return; } accessedRestrictedVars.put(originalSymbol, new ArrayList<>() {{ add(varRef); }}); } private boolean isEnclosedLockWithinSameFunction(BLangLock currentLock, BLangLock potentialOuterLock) { return isEnclosedLockWithinSameFunction(currentLock.parent, potentialOuterLock); } private boolean isEnclosedLockWithinSameFunction(BLangNode parent, BLangLock potentialOuterLock) { if (parent == potentialOuterLock) { return true; } if (parent == null || parent.getKind() == NodeKind.FUNCTION) { return false; } return isEnclosedLockWithinSameFunction(parent.parent, potentialOuterLock); } private boolean isReferenceOfLetVarInitializedWithAnIsolatedExpression(BLangSimpleVarRef varRef) { BSymbol symbol = varRef.symbol; if ((symbol.owner.tag & SymTag.LET) != SymTag.LET) { return false; } BSymbol originalSymbol = getOriginalSymbol(symbol); for (int i = isolatedLetVarStack.size() - 1; i >= 0; i--) { if (isolatedLetVarStack.get(i).contains(originalSymbol)) { return true; } } return false; } private boolean isReferenceToVarDefinedInSameInvokable(BSymbol currentOwner, 
                                                           BInvokableSymbol enclInvokableSymbol) {
        if (currentOwner == enclInvokableSymbol) {
            return true;
        }
        if ((currentOwner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
            // Hit a different invokable before reaching the enclosing one.
            return false;
        }
        BSymbol nextOwner = currentOwner.owner;
        if (nextOwner == null) {
            return false;
        }
        return isReferenceToVarDefinedInSameInvokable(nextOwner, enclInvokableSymbol);
    }

    // Isolation is inferable for non-public standalone functions, and for attached functions whose
    // owner is non-public, a service declaration, or an object constructor. Body-less (interface)
    // functions are excluded.
    private boolean isIsolationInferableFunction(BLangFunction funcNode) {
        Set<Flag> flagSet = funcNode.flagSet;
        if (flagSet.contains(Flag.INTERFACE)) {
            return false;
        }
        if (!flagSet.contains(Flag.ATTACHED)) {
            return !flagSet.contains(Flag.PUBLIC);
        }
        BSymbol owner = funcNode.symbol.owner;
        if (!Symbols.isFlagOn(owner.flags, Flags.PUBLIC)) {
            return true;
        }
        if (!(owner instanceof BClassSymbol)) {
            return false;
        }
        BClassSymbol ownerClassSymbol = (BClassSymbol) owner;
        return ownerClassSymbol.isServiceDecl || Symbols.isFlagOn(ownerClassSymbol.flags, Flags.OBJECT_CTOR);
    }

    // Marks the enclosing invokable (or the temp symbol of an enclosing arrow function) as
    // depending on a construct whose isolation cannot be inferred; for attached functions the
    // owning object's inference info is poisoned too.
    private void markDependsOnIsolationNonInferableConstructs() {
        BLangInvokableNode enclInvokable = env.enclInvokable;
        BInvokableSymbol enclInvokableSymbol;
        if (enclInvokable == null) {
            if (isNotInArrowFunctionBody(env)) {
                return;
            }
            enclInvokableSymbol = this.arrowFunctionTempSymbolMap.get((BLangArrowFunction) env.enclEnv.node);
        } else {
            enclInvokableSymbol = enclInvokable.symbol;
            if (enclInvokable.getKind() == NodeKind.FUNCTION && ((BLangFunction) enclInvokable).attachedFunction) {
                BSymbol owner = enclInvokableSymbol.owner;
                if (this.isolationInferenceInfoMap.containsKey(owner)) {
                    this.isolationInferenceInfoMap.get(owner).dependsOnlyOnInferableConstructs = false;
                }
            }
            if (!this.isolationInferenceInfoMap.containsKey(enclInvokableSymbol)) {
                return;
            }
        }
        this.isolationInferenceInfoMap.get(enclInvokableSymbol).dependsOnlyOnInferableConstructs = false;
    }

    // Calls to public functions poison the caller's inference; non-public callees are recorded as
    // inference dependencies instead.
    private void analyzeFunctionForInference(BInvokableSymbol symbol) {
        if (Symbols.isFlagOn(symbol.flags, Flags.PUBLIC)) {
            markDependsOnIsolationNonInferableConstructs();
            return;
        }
        markDependentlyIsolatedOnFunction(symbol);
    }

    private void
markInitMethodDependentlyIsolatedOnFunction(BLangInvokableNode initMethod, BInvokableSymbol symbol) { BInvokableSymbol initMethodSymbol = initMethod.symbol; if (!isolationInferenceInfoMap.containsKey(initMethodSymbol)) { isolationInferenceInfoMap.put(initMethodSymbol, new IsolationInferenceInfo()); } markFunctionDependentlyIsolatedOnFunction(initMethod, symbol); } private void markDependentlyIsolatedOnFunction(BInvokableSymbol symbol) { BLangInvokableNode enclInvokable = env.enclInvokable; markFunctionDependentlyIsolatedOnFunction(enclInvokable, symbol); } private void markFunctionDependentlyIsolatedOnFunction(BLangInvokableNode enclInvokable, BInvokableSymbol symbol) { BInvokableSymbol enclInvokableSymbol; if (enclInvokable == null) { if (isNotInArrowFunctionBody(env)) { return; } enclInvokableSymbol = this.arrowFunctionTempSymbolMap.get((BLangArrowFunction) env.enclEnv.node); } else { enclInvokableSymbol = enclInvokable.symbol; if (!isolationInferenceInfoMap.containsKey(enclInvokableSymbol)) { return; } } isolationInferenceInfoMap.get(enclInvokableSymbol).dependsOnFunctions.add(symbol); } private boolean isNotInArrowFunctionBody(SymbolEnv env) { return env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY || env.enclEnv.node.getKind() != NodeKind.ARROW_EXPR; } private void markInitMethodDependentlyIsolatedOnVar(BLangInvokableNode initMethod, BSymbol symbol) { BInvokableSymbol initMethodSymbol = initMethod.symbol; if (!isolationInferenceInfoMap.containsKey(initMethodSymbol)) { isolationInferenceInfoMap.put(initMethodSymbol, new IsolationInferenceInfo()); } markFunctionDependentlyIsolatedOnVar(initMethod, symbol); } private void markDependentlyIsolatedOnVar(BSymbol symbol) { BLangInvokableNode enclInvokable = env.enclInvokable; markFunctionDependentlyIsolatedOnVar(enclInvokable, symbol); } private void markFunctionDependentlyIsolatedOnVar(BLangInvokableNode enclInvokable, BSymbol symbol) { BInvokableSymbol enclInvokableSymbol; if (enclInvokable == null) { if 
(isNotInArrowFunctionBody(env)) { return; } enclInvokableSymbol = this.arrowFunctionTempSymbolMap.get((BLangArrowFunction) env.enclEnv.node); } else { enclInvokableSymbol = enclInvokable.symbol; if (!isolationInferenceInfoMap.containsKey(enclInvokableSymbol)) { return; } } isolationInferenceInfoMap.get(enclInvokableSymbol).dependsOnVariablesAndClasses.add(symbol); } private Set<BSymbol> getModuleLevelVarSymbols(List<BLangVariable> moduleLevelVars) { Set<BSymbol> symbols = new HashSet<>(moduleLevelVars.size()); for (BLangVariable globalVar : moduleLevelVars) { symbols.add(globalVar.symbol); } return symbols; } private void populateNonPublicMutableOrNonIsolatedVars(Set<BSymbol> moduleLevelVarSymbols) { for (BSymbol moduleLevelVarSymbol : moduleLevelVarSymbols) { if (!isVarRequiringInference(moduleLevelVarSymbol)) { continue; } this.isolationInferenceInfoMap.put(moduleLevelVarSymbol, new VariableIsolationInferenceInfo()); } } private void populateNonPublicIsolatedInferableClasses(List<BLangClassDefinition> classDefinitions) { for (BLangClassDefinition classDefinition : classDefinitions) { populateInferableClass(classDefinition); } } private boolean inObjectInitMethod() { BLangInvokableNode enclInvokable = env.enclInvokable; if (enclInvokable == null || enclInvokable.getKind() != NodeKind.FUNCTION) { return false; } return ((BLangFunction) enclInvokable).objInitFunction; } private boolean isVarRequiringInference(BSymbol moduleLevelVarSymbol) { long symbolFlags = moduleLevelVarSymbol.flags; if (Symbols.isFlagOn(symbolFlags, Flags.PUBLIC) || Symbols.isFlagOn(symbolFlags, Flags.ISOLATED)) { return false; } if (!Symbols.isFlagOn(symbolFlags, Flags.FINAL)) { return true; } BType type = moduleLevelVarSymbol.type; return !types.isInherentlyImmutableType(type) && !Symbols.isFlagOn(type.flags, Flags.READONLY); } private void populateInferableClass(BLangClassDefinition classDefinition) { if (Symbols.isFlagOn(classDefinition.symbol.flags, Flags.PUBLIC) && 
!classDefinition.isServiceDecl && !classDefinition.flagSet.contains(Flag.OBJECT_CTOR)) { return; } BType type = classDefinition.getBType(); if (Symbols.isFlagOn(type.flags, Flags.ISOLATED)) { return; } Set<BLangIdentifier> protectedFields = new HashSet<>(); Set<BSymbol> dependentObjectTypes = new HashSet<>(); Map<String, BLangSimpleVariable> fields = new HashMap<>(); for (BLangSimpleVariable field : classDefinition.fields) { fields.put(field.name.value, field); } for (BLangSimpleVariable referencedField : classDefinition.referencedFields) { String name = referencedField.name.value; if (fields.containsKey(name)) { continue; } fields.put(name, referencedField); } for (BLangSimpleVariable field : fields.values()) { boolean isFinal = field.flagSet.contains(Flag.FINAL); boolean isPrivate = field.flagSet.contains(Flag.PRIVATE); if (!isFinal && !isPrivate) { return; } BType fieldType = field.getBType(); if (isFinal && types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(fieldType)) { continue; } boolean subtypeOfReadOnlyOrIsolatedObjectOrInferableObject = isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(classDefinition.symbol.owner, fieldType, dependentObjectTypes); if (!isPrivate && !subtypeOfReadOnlyOrIsolatedObjectOrInferableObject) { return; } protectedFields.add(field.name); } ClassIsolationInferenceInfo inferenceInfo = new ClassIsolationInferenceInfo(protectedFields); this.isolationInferenceInfoMap.put(classDefinition.symbol, inferenceInfo); inferenceInfo.dependsOnVariablesAndClasses.addAll(dependentObjectTypes); } private boolean isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(BSymbol owner, BType type) { return isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(owner, type, new HashSet<>()); } private boolean isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(BSymbol owner, BType type, Set<BSymbol> inferableClasses) { if (types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type)) { return true; } int tag = type.tag; if (tag == TypeTags.OBJECT) { BTypeSymbol 
tsymbol = type.tsymbol; boolean inferable = tsymbol.owner == owner && !Symbols.isFlagOn(tsymbol.flags, Flags.PUBLIC); if (inferable) { inferableClasses.add(tsymbol); } return inferable; } if (tag != TypeTags.UNION) { return false; } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(owner, memberType, inferableClasses)) { return false; } } return true; } private Set<BType> getPubliclyExposedObjectTypes(BLangPackage bLangPackage) { Set<BType> publiclyExposedTypes = new HashSet<>(); BPubliclyExposedInferableTypeCollector collector = new BPubliclyExposedInferableTypeCollector(publiclyExposedTypes); List<BLangVariable> moduleVarsAndConstants = new ArrayList<>() {{ addAll(bLangPackage.globalVars); addAll(bLangPackage.constants); }}; for (BLangVariable construct : moduleVarsAndConstants) { if (!construct.flagSet.contains(Flag.PUBLIC)) { continue; } BLangType typeNode = construct.typeNode; if (typeNode == null) { continue; } collector.visitType(typeNode.getBType()); } for (BLangTypeDefinition typeDefinition : bLangPackage.typeDefinitions) { Set<Flag> flagSet = typeDefinition.flagSet; if (!flagSet.contains(Flag.PUBLIC) || flagSet.contains(Flag.ANONYMOUS)) { continue; } collector.visitType(typeDefinition.typeNode.getBType()); } for (BLangClassDefinition classDefinition : bLangPackage.classDefinitions) { Set<Flag> flagSet = classDefinition.flagSet; if (!flagSet.contains(Flag.PUBLIC) || classDefinition.isServiceDecl || flagSet.contains(Flag.OBJECT_CTOR)) { continue; } collector.visitType(classDefinition.getBType()); } for (BLangFunction function : bLangPackage.functions) { if (!function.flagSet.contains(Flag.PUBLIC) && (!function.attachedFunction || !function.receiver.flagSet.contains(Flag.PUBLIC))) { continue; } collector.visitType(function.getBType()); } return publiclyExposedTypes; } private void inferIsolation(Set<BSymbol> moduleLevelVarSymbols, Set<BType> publiclyExposedObjectTypes, 
List<BLangClassDefinition> classDefinitions) { for (Map.Entry<BSymbol, IsolationInferenceInfo> entry : this.isolationInferenceInfoMap.entrySet()) { IsolationInferenceInfo value = entry.getValue(); BSymbol symbol = entry.getKey(); if (value.getKind() == IsolationInferenceKind.FUNCTION) { if (inferFunctionIsolation(symbol, value, publiclyExposedObjectTypes, classDefinitions, new HashSet<>())) { symbol.flags |= Flags.ISOLATED; if (!moduleLevelVarSymbols.contains(symbol)) { symbol.type.flags |= Flags.ISOLATED; } } continue; } boolean isObjectType = symbol.kind == SymbolKind.OBJECT; if (!isObjectType && isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(publiclyExposedObjectTypes, classDefinitions, symbol, new HashSet<>())) { continue; } if (inferVariableOrClassIsolation(publiclyExposedObjectTypes, classDefinitions, symbol, (VariableIsolationInferenceInfo) value, isObjectType, new HashSet<>())) { symbol.flags |= Flags.ISOLATED; if (isObjectType) { symbol.type.flags |= Flags.ISOLATED; } } } this.isolationInferenceInfoMap.clear(); this.arrowFunctionTempSymbolMap.clear(); } private boolean inferVariableOrClassIsolation(Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, BSymbol symbol, VariableIsolationInferenceInfo inferenceInfo, boolean isObjectType, Set<BSymbol> unresolvedSymbols) { if (!unresolvedSymbols.add(symbol)) { return true; } if (!inferenceInfo.dependsOnlyOnInferableConstructs) { return false; } if (inferenceInfo.accessedOutsideLockStatement) { if (!inferenceInfo.accessOutsideLockStatementValidIfInferredIsolated) { if (inferenceInfo.getKind() == IsolationInferenceKind.CLASS) { return false; } if (Symbols.isFlagOn(symbol.flags, Flags.FINAL)) { return isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, true, symbol.type, unresolvedSymbols); } return false; } for (BType bType : inferenceInfo.typesOfFinalFieldsAccessedOutsideLock) { if 
(!isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, true, bType, unresolvedSymbols)) { return false; } } } if (isObjectType) { if (publiclyExposedObjectTypes.contains(symbol.type)) { return false; } BLangClassDefinition classDefinition = null; for (BLangClassDefinition classDef : classDefinitions) { if (classDef.symbol == symbol) { classDefinition = classDef; break; } } if (classDefinition != null) { List<BLangSimpleVariable> classFields = classDefinition.fields; Map<BLangIdentifier, BLangSimpleVariable> fields = new HashMap<>(classFields.size()); for (BLangSimpleVariable classField : classFields) { fields.put(classField.name, classField); } for (BLangIdentifier protectedField : ((ClassIsolationInferenceInfo) inferenceInfo).protectedFields) { BLangSimpleVariable field = fields.get(protectedField); if (field.flagSet.contains(Flag.PRIVATE)) { continue; } if (!field.flagSet.contains(Flag.FINAL)) { return false; } BType fieldType = field.typeNode.getBType(); if (!isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, true, fieldType, unresolvedSymbols)) { return false; } } for (BLangSimpleVariable field : classDefinition.fields) { BLangExpression expr = field.expr; if (expr != null && !isIsolatedExpression(expr, false, false, new ArrayList<>(), true, publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) { return false; } } BLangFunction initFunction = classDefinition.initFunction; if (initFunction != null) { BLangFunctionBody body = initFunction.body; for (BLangStatement stmt : ((BLangBlockFunctionBody) body).stmts) { if (stmt.getKind() != NodeKind.ASSIGNMENT) { continue; } BLangAssignment assignmentStmt = (BLangAssignment) stmt; BLangExpression lhs = assignmentStmt.varRef; if (lhs.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR) { continue; } BLangFieldBasedAccess fieldAccessExpr = (BLangFieldBasedAccess) lhs; BLangExpression calledOnExpr = fieldAccessExpr.expr; if 
(calledOnExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { continue; } if (!isSelfOfObject((BLangSimpleVarRef) calledOnExpr)) { continue; } if (!isIsolatedExpression(assignmentStmt.expr, false, false, new ArrayList<>(), true, publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) { return false; } } } } } else if (isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(publiclyExposedObjectTypes, classDefinitions, symbol, unresolvedSymbols)) { return true; } else if (Symbols.isFlagOn(symbol.flags, Flags.LISTENER)) { return false; } for (LockInfo lockInfo : inferenceInfo.accessedLockInfo) { if (!lockInfo.accessedRestrictedVars.isEmpty()) { return false; } for (BSymbol accessedPotentiallyIsolatedVar : lockInfo.accessedPotentiallyIsolatedVars) { if (accessedPotentiallyIsolatedVar == symbol) { continue; } if (!isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(publiclyExposedObjectTypes, classDefinitions, accessedPotentiallyIsolatedVar, unresolvedSymbols)) { return false; } } for (BLangExpression expr : lockInfo.nonIsolatedTransferInExpressions) { if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) expr).symbol == symbol) { continue; } if (isIsolatedExpression(expr, false, false, new ArrayList<>(), true, publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) { continue; } return false; } for (BLangExpression expr : lockInfo.nonIsolatedTransferOutExpressions) { if (isIsolatedExpression(expr, false, false, new ArrayList<>(), true, publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) { continue; } return false; } for (BLangInvocation nonIsolatedInvocation : lockInfo.nonIsolatedInvocations) { BSymbol funcSymbol = nonIsolatedInvocation.symbol; if (!this.isolationInferenceInfoMap.containsKey(funcSymbol)) { return false; } if (inferFunctionIsolation(funcSymbol, this.isolationInferenceInfoMap.get(funcSymbol), publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) { continue; } return false; } } return true; 
}

    // Returns true if `symbol` is a `final` variable whose type is (possibly only after isolation
    // inference) a subtype of `readonly` or a union of isolated object types. `unresolvedSymbols`
    // carries symbols currently being resolved, to break cycles in the recursive inference.
    private boolean isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(Set<BType> publiclyExposedObjectTypes,
                                                                          List<BLangClassDefinition> classDefinitions,
                                                                          BSymbol symbol,
                                                                          Set<BSymbol> unresolvedSymbols) {
        return Symbols.isFlagOn(symbol.flags, Flags.FINAL) &&
                isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions,
                        true, symbol.type, unresolvedSymbols);
    }

    // Recursively decides whether the function represented by `symbol` can be inferred as isolated.
    // Returns true if the function is (or can be assumed, within the current resolution cycle, to be)
    // isolated. The result is cached in `functionIsolationInferenceInfo.inferredIsolated` only when
    // this call is the root of the resolution (i.e., no other symbols are pending).
    private boolean inferFunctionIsolation(BSymbol symbol, IsolationInferenceInfo functionIsolationInferenceInfo,
                                           Set<BType> publiclyExposedObjectTypes,
                                           List<BLangClassDefinition> classDefinitions,
                                           Set<BSymbol> unresolvedSymbols) {
        if (!unresolvedSymbols.add(symbol)) {
            // Already being resolved further up the stack: assume isolated to break the cycle.
            return true;
        }

        if (!functionIsolationInferenceInfo.dependsOnlyOnInferableConstructs) {
            return false;
        }

        if (symbol.kind == SymbolKind.FUNCTION) {
            BVarSymbol receiverSymbol = ((BInvokableSymbol) symbol).receiverSymbol;
            // A method on a publicly exposed object type cannot be inferred as isolated, since
            // external callers/subtypes outside this module are not analyzable.
            if (receiverSymbol != null && receiverSymbol.type.tag == TypeTags.OBJECT &&
                    publiclyExposedObjectTypes.contains(receiverSymbol.type)) {
                return false;
            }
        }

        if (functionIsolationInferenceInfo.inferredIsolated) {
            return true;
        }

        // Every function this one depends on must itself be inferable as isolated.
        for (BInvokableSymbol bInvokableSymbol : functionIsolationInferenceInfo.dependsOnFunctions) {
            if (!this.isolationInferenceInfoMap.containsKey(bInvokableSymbol)) {
                return false;
            }

            if (!inferFunctionIsolation(bInvokableSymbol, this.isolationInferenceInfoMap.get(bInvokableSymbol),
                    publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) {
                return false;
            }
        }

        // Every variable/class this function depends on must be isolated or inferably isolated.
        for (BSymbol dependsOnVariable : functionIsolationInferenceInfo.dependsOnVariablesAndClasses) {
            if (Symbols.isFlagOn(dependsOnVariable.flags, Flags.ISOLATED)) {
                continue;
            }

            if (!this.isolationInferenceInfoMap.containsKey(dependsOnVariable)) {
                return false;
            }

            if (!inferVariableOrClassIsolation(publiclyExposedObjectTypes, classDefinitions, dependsOnVariable,
                    (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(
                            dependsOnVariable), false, unresolvedSymbols)) {
                return false;
            }
        }

        // Cache the positive result only at the root of the resolution (this symbol is the only
        // pending one), since intermediate results may rely on cycle assumptions.
        if (unresolvedSymbols.size() == 1) {
            functionIsolationInferenceInfo.inferredIsolated = true;
        }

        return true;
    }

    // Logs concurrency warnings for every service class in `classDefinitions`.
    private void logServiceIsolationWarnings(List<BLangClassDefinition> classDefinitions) {
        for (BLangClassDefinition classDefinition : classDefinitions) {
            if (classDefinition.flagSet.contains(Flag.SERVICE)) {
                logServiceIsolationWarnings(classDefinition);
            }
        }
    }

    // Warns for each resource/remote method of `classDefinition` that will not be invoked
    // concurrently because either the service or the method (or both) is non-isolated.
    private void logServiceIsolationWarnings(BLangClassDefinition classDefinition) {
        boolean isolatedService = isIsolated(classDefinition.getBType().flags);

        for (BLangFunction function : classDefinition.functions) {
            Set<Flag> flagSet = function.flagSet;

            // Only resource and remote methods are dispatched by the listener, so only they warrant
            // a warning.
            if (!flagSet.contains(Flag.RESOURCE) && !flagSet.contains(Flag.REMOTE)) {
                continue;
            }

            boolean isolatedMethod = isIsolated(function.getBType().flags);

            if (isolatedService && isolatedMethod) {
                continue;
            }

            dlog.warning(getStartLocation(function.pos), getWarningCode(isolatedService, isolatedMethod));
        }
    }

    // Collapses `location` to a zero-width location at its start position, so the warning points at
    // the beginning of the construct rather than spanning its whole body.
    private Location getStartLocation(Location location) {
        LineRange lineRange = location.lineRange();
        LinePosition linePosition = lineRange.startLine();
        int startLine = linePosition.line();
        int startColumn = linePosition.offset();
        return new BLangDiagnosticLocation(lineRange.filePath(), startLine, startLine, startColumn, startColumn);
    }

    // Picks the warning code matching which of the service/method pair is non-isolated.
    // Callers only invoke this when at least one of the two is non-isolated.
    private DiagnosticWarningCode getWarningCode(boolean isolatedService, boolean isolatedMethod) {
        if (!isolatedService && !isolatedMethod) {
            return DiagnosticWarningCode
                    .CONCURRENT_CALLS_WILL_NOT_BE_MADE_TO_NON_ISOLATED_METHOD_IN_NON_ISOLATED_SERVICE;
        }

        if (isolatedService) {
            return DiagnosticWarningCode.CONCURRENT_CALLS_WILL_NOT_BE_MADE_TO_NON_ISOLATED_METHOD;
        }

        return DiagnosticWarningCode.CONCURRENT_CALLS_WILL_NOT_BE_MADE_TO_NON_ISOLATED_SERVICE;
    }

    // Returns the temporary invokable symbol standing in for `bLangArrowFunction` during isolation
    // inference, creating and registering one (with empty inference info) on first request.
    private BInvokableSymbol createTempSymbolIfNonExistent(BLangArrowFunction bLangArrowFunction) {
        if (arrowFunctionTempSymbolMap.containsKey(bLangArrowFunction)) {
            return arrowFunctionTempSymbolMap.get(bLangArrowFunction);
        }

        TemporaryArrowFunctionSymbol symbol = new TemporaryArrowFunctionSymbol(bLangArrowFunction);
        this.arrowFunctionTempSymbolMap.put(bLangArrowFunction, symbol);
        this.isolationInferenceInfoMap.put(symbol, new IsolationInferenceInfo());
        return symbol;
    }

    /**
     * For lock statements with restricted var usage, invalid transfers and non-isolated invocations should result in
     * compilation errors. This class holds potentially erroneous expression per lock statement, and the protected
     * variables accessed in the lock statement, and information required for isolated inference.
     */
    private static class LockInfo {
        BLangLock lockNode;

        // Restricted variables accessed in the lock, keyed by symbol, with every referencing var-ref.
        Map<BSymbol, List<BLangSimpleVarRef>> accessedRestrictedVars = new HashMap<>();
        // LHS var-refs that are not capture binding patterns (potentially invalid assignments).
        List<BLangSimpleVarRef> nonCaptureBindingPatternVarRefsOnLhs = new ArrayList<>();
        // Expressions transferring potentially non-isolated values into the lock.
        List<BLangExpression> nonIsolatedTransferInExpressions = new ArrayList<>();
        // Expressions transferring potentially non-isolated values out of the lock.
        List<BLangExpression> nonIsolatedTransferOutExpressions = new ArrayList<>();
        // Invocations of non-isolated functions made within the lock.
        List<BLangInvocation> nonIsolatedInvocations = new ArrayList<>();
        // Variables accessed in the lock that may later be inferred as isolated.
        Set<BSymbol> accessedPotentiallyIsolatedVars = new HashSet<>();

        private LockInfo(BLangLock lockNode) {
            this.lockNode = lockNode;
        }
    }

    // Inference bookkeeping for a function: what it depends on, and whether it has already been
    // inferred as isolated.
    private static class IsolationInferenceInfo {
        // False once the construct is seen to depend on something whose isolation cannot be inferred.
        boolean dependsOnlyOnInferableConstructs = true;
        // Functions whose isolation this construct's isolation depends on.
        Set<BInvokableSymbol> dependsOnFunctions = new HashSet<>();
        // Variables and classes whose isolation this construct's isolation depends on.
        Set<BSymbol> dependsOnVariablesAndClasses = new HashSet<>();
        // Set once inference concludes this construct is isolated (cached result).
        boolean inferredIsolated = false;

        IsolationInferenceKind getKind() {
            return IsolationInferenceKind.FUNCTION;
        }
    }

    // Inference bookkeeping for a module-level variable: additionally tracks where it was accessed
    // relative to lock statements, which determines whether it can be inferred as isolated.
    private static class VariableIsolationInferenceInfo extends IsolationInferenceInfo {
        // Lock statements within which this variable was accessed.
        Set<LockInfo> accessedLockInfo = new HashSet<>();
        boolean accessedOutsideLockStatement = false;
        // Whether an access outside a lock would still be valid if the variable is inferred isolated
        // (e.g. access of a final field of an immutable/isolated type).
        boolean accessOutsideLockStatementValidIfInferredIsolated = true;
        // Types of final fields accessed outside locks; these must turn out readonly/isolated.
        Set<BType> typesOfFinalFieldsAccessedOutsideLock = new HashSet<>();

        @Override
        IsolationInferenceKind getKind() {
            return IsolationInferenceKind.VARIABLE;
        }
    }

    // Inference bookkeeping for a class: additionally records the fields that must be protected
    // (accessed only within locks) for the class to be inferred as an isolated object.
    private static class ClassIsolationInferenceInfo extends VariableIsolationInferenceInfo {
        Set<BLangIdentifier> protectedFields;

        ClassIsolationInferenceInfo(Set<BLangIdentifier> protectedFields) {
            this.protectedFields = protectedFields;
        }

        @Override
        IsolationInferenceKind getKind() {
            return IsolationInferenceKind.CLASS;
        }
    }

    // Discriminates the three kinds of constructs isolation is inferred for.
    private enum IsolationInferenceKind {
        CLASS,
        VARIABLE,
        FUNCTION
    }

    // Stand-in invokable symbol for an anonymous arrow function, so arrow functions can participate
    // in the symbol-keyed inference maps. Deliberately a non-static inner class: the constructor
    // reads `env` from the enclosing analyzer.
    private class TemporaryArrowFunctionSymbol extends BInvokableSymbol {
        TemporaryArrowFunctionSymbol(BLangArrowFunction fn) {
            super(SymTag.FUNCTION, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, fn.funcType, env.enclEnv.enclVarSym,
                    null, VIRTUAL);
            this.kind = SymbolKind.FUNCTION;
        }
    }

    // Type visitor that walks a type graph and collects every object type reachable from a publicly
    // exposed type into `exposedTypes`. `unresolvedTypes` guards against cycles in recursive types.
    private static class BPubliclyExposedInferableTypeCollector implements TypeVisitor {

        Set<BType> unresolvedTypes;
        Set<BType> exposedTypes;

        public BPubliclyExposedInferableTypeCollector(Set<BType> exposedTypes) {
            this.unresolvedTypes = new HashSet<>();
            this.exposedTypes = exposedTypes;
        }

        // Entry point: dispatch to the matching visit method, skipping null and already-seen types.
        public void visitType(BType type) {
            if (type == null) {
                return;
            }

            if (!unresolvedTypes.add(type)) {
                return;
            }

            type.accept(this);
        }

        @Override
        public void visit(BAnnotationType bAnnotationType) {
        }

        @Override
        public void visit(BArrayType bArrayType) {
            visitType(bArrayType.eType);
        }

        @Override
        public void visit(BBuiltInRefType bBuiltInRefType) {
        }

        @Override
        public void visit(BAnyType bAnyType) {
        }

        @Override
        public void visit(BAnydataType bAnydataType) {
        }

        @Override
        public void visit(BErrorType bErrorType) {
            visitType(bErrorType.detailType);
        }

        @Override
        public void visit(BFiniteType bFiniteType) {
        }

        @Override
        public void visit(BInvokableType bInvokableType) {
            // `function` (any function) has no analyzable signature.
            if (Symbols.isFlagOn(bInvokableType.flags, Flags.ANY_FUNCTION)) {
                return;
            }

            for (BType paramType : bInvokableType.paramTypes) {
                visitType(paramType);
            }
            visitType(bInvokableType.restType);
            visitType(bInvokableType.retType);
        }

        @Override
        public void visit(BJSONType bjsonType) {
        }

        @Override
        public void visit(BMapType bMapType) {
            visitType(bMapType.constraint);
        }

        @Override
        public void visit(BStreamType bStreamType) {
            visitType(bStreamType.constraint);
            visitType(bStreamType.completionType);
        }

        @Override
        public void visit(BTypedescType bTypedescType) {
            visitType(bTypedescType.constraint);
        }

        // NOTE(review): the parameter name `bTypedescType` here looks like a copy-paste artifact
        // from the overload above; renaming it to `bParameterizedType` would be clearer.
        @Override
        public void visit(BParameterizedType bTypedescType) {
        }

        @Override
        public void visit(BNeverType bNeverType) {
        }

        @Override
        public void visit(BNilType bNilType) {
        }

        @Override
        public void visit(BNoType bNoType) {
        }

        @Override
        public void visit(BPackageType bPackageType) {
        }

        @Override
        public void visit(BStructureType bStructureType) {
        }

        @Override
        public void visit(BTupleType bTupleType) {
            for (BType memType : bTupleType.tupleTypes) {
                visitType(memType);
            }
            visitType(bTupleType.restType);
        }

        @Override
        public void visit(BUnionType bUnionType) {
            for (BType memType : bUnionType.getMemberTypes()) {
                visitType(memType);
            }
        }

        @Override
        public void visit(BIntersectionType bIntersectionType) {
            for (BType constituentType : bIntersectionType.getConstituentTypes()) {
                visitType(constituentType);
            }
            visitType(bIntersectionType.effectiveType);
        }

        @Override
        public void visit(BXMLType bXmlType) {
            visitType(bXmlType.constraint);
        }

        @Override
        public void visit(BTableType bTableType) {
            visitType(bTableType.constraint);
            visitType(bTableType.keyTypeConstraint);
        }

        @Override
        public void visit(BRecordType bRecordType) {
            for (BField field : bRecordType.fields.values()) {
                visitType(field.type);
            }

            // An open record also exposes its rest field type.
            if (!bRecordType.sealed) {
                visitType(bRecordType.restFieldType);
            }
        }

        @Override
        public void visit(BObjectType bObjectType) {
            // The object type itself is exposed, along with everything reachable from its fields
            // and attached method signatures.
            this.exposedTypes.add(bObjectType);

            for (BField field : bObjectType.fields.values()) {
                visitType(field.type);
            }

            for (BAttachedFunction attachedFunc : ((BObjectTypeSymbol) bObjectType.tsymbol).attachedFuncs) {
                visitType(attachedFunc.type);
            }
        }

        @Override
        public void visit(BType bType) {
        }

        @Override
        public void visit(BFutureType bFutureType) {
            visitType(bFutureType.constraint);
        }

        @Override
        public void visit(BHandleType bHandleType) {
        }
    }
}
class IsolationAnalyzer extends BLangNodeVisitor { private static final CompilerContext.Key<IsolationAnalyzer> ISOLATION_ANALYZER_KEY = new CompilerContext.Key<>(); private static final String VALUE_LANG_LIB = "lang.value"; private static final String CLONE_LANG_LIB_METHOD = "clone"; private static final String CLONE_READONLY_LANG_LIB_METHOD = "cloneReadOnly"; private SymbolEnv env; private final SymbolTable symTable; private final SymbolResolver symResolver; private final Names names; private final Types types; private final BLangDiagnosticLog dlog; private boolean inferredIsolated = true; private boolean inLockStatement = false; private final Stack<LockInfo> copyInLockInfoStack = new Stack<>(); private final Stack<Set<BSymbol>> isolatedLetVarStack = new Stack<>(); private final Map<BSymbol, IsolationInferenceInfo> isolationInferenceInfoMap = new HashMap<>(); private final Map<BLangArrowFunction, BInvokableSymbol> arrowFunctionTempSymbolMap = new HashMap<>(); private IsolationAnalyzer(CompilerContext context) { context.put(ISOLATION_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.names = Names.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); } public static IsolationAnalyzer getInstance(CompilerContext context) { IsolationAnalyzer isolationAnalyzer = context.get(ISOLATION_ANALYZER_KEY); if (isolationAnalyzer == null) { isolationAnalyzer = new IsolationAnalyzer(context); } return isolationAnalyzer; } private void analyzeNode(BLangNode node, SymbolEnv env) { SymbolEnv prevEnv = this.env; this.env = env; if (node != null) { node.accept(this); } this.env = prevEnv; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.ISOLATION_ANALYZE)) { return; } for (BLangTypeDefinition typeDefinition : pkgNode.typeDefinitions) { analyzeNode(typeDefinition.typeNode, env); } for 
(BLangClassDefinition classDefinition : pkgNode.classDefinitions) { if (classDefinition.flagSet.contains(Flag.ANONYMOUS) && isIsolated(classDefinition.getBType().flags)) { classDefinition.flagSet.add(Flag.ISOLATED); classDefinition.symbol.flags |= Flags.ISOLATED; } analyzeNode(classDefinition, env); } for (BLangFunction function : pkgNode.functions) { analyzeNode(function, env); } for (BLangVariable globalVar : pkgNode.globalVars) { analyzeNode(globalVar, env); } for (BLangTestablePackage testablePkg : pkgNode.testablePkgs) { analyze(testablePkg); } pkgNode.completedPhases.add(CompilerPhase.ISOLATION_ANALYZE); } @Override public void visit(BLangCompilationUnit compUnit) { } @Override public void visit(BLangImportPackage importPkgNode) { } @Override public void visit(BLangXMLNS xmlnsNode) { } @Override public void visit(BLangResourceFunction funcNode) { visit((BLangFunction) funcNode); } @Override public void visit(BLangFunction funcNode) { boolean prevInferredIsolated = this.inferredIsolated; this.inferredIsolated = true; IsolationInferenceInfo functionIsolationInferenceInfo = null; BInvokableSymbol symbol = funcNode.symbol; if (isIsolationInferableFunction(funcNode) && !isolationInferenceInfoMap.containsKey(symbol)) { functionIsolationInferenceInfo = new IsolationInferenceInfo(); isolationInferenceInfoMap.put(symbol, functionIsolationInferenceInfo); } SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, symbol.scope, env); for (BLangSimpleVariable requiredParam : funcNode.requiredParams) { if (!requiredParam.symbol.isDefaultable) { continue; } analyzeNode(requiredParam.expr, funcEnv); } analyzeNode(funcNode.body, funcEnv); if (this.inferredIsolated && !isIsolated(symbol.flags) && !Symbols.isFlagOn(symbol.flags, Flags.WORKER) && functionIsolationInferenceInfo != null && functionIsolationInferenceInfo.dependsOnlyOnInferableConstructs && !funcNode.objInitFunction) { functionIsolationInferenceInfo.inferredIsolated = true; } this.inferredIsolated = 
this.inferredIsolated && prevInferredIsolated; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); for (BLangStatement statement : body.stmts) { analyzeNode(statement, bodyEnv); } } @Override public void visit(BLangExprFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); analyzeNode(body.expr, bodyEnv); } @Override public void visit(BLangExternalFunctionBody body) { markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; } @Override public void visit(BLangService serviceNode) { } @Override public void visit(BLangTypeDefinition typeDefinition) { analyzeNode(typeDefinition.typeNode, env); } @Override public void visit(BLangConstant constant) { BLangType typeNode = constant.typeNode; if (typeNode != null) { analyzeNode(typeNode, env); } analyzeNode(constant.expr, env); } @Override public void visit(BLangSimpleVariable varNode) { BLangType typeNode = varNode.typeNode; if (typeNode != null && (typeNode.getBType() == null || typeNode.getBType().tsymbol == null || typeNode.getBType().tsymbol.owner.getKind() != SymbolKind.PACKAGE)) { analyzeNode(typeNode, env); } BVarSymbol symbol = varNode.symbol; var flags = symbol.flags; BLangExpression expr = varNode.expr; BType fieldType = varNode.getBType(); boolean isolatedClassField = isIsolatedClassField(); if (isolatedClassField && isExpectedToBeAPrivateField(symbol, fieldType) && !Symbols.isFlagOn(flags, Flags.PRIVATE)) { dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_NON_PRIVATE_MUTABLE_FIELD_IN_ISOLATED_OBJECT); } if (expr == null) { return; } if (isolatedClassField || varNode.flagSet.contains(Flag.ISOLATED)) { validateIsolatedExpression(fieldType, expr); } analyzeNode(expr, env); BSymbol owner = symbol.owner; if (owner != null && ((owner.tag & SymTag.LET) == SymTag.LET) && isIsolatedExpression(expr)) { isolatedLetVarStack.peek().add(symbol); } if (Symbols.isFlagOn(flags, Flags.WORKER)) { 
markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; if (isInIsolatedFunction(env.enclInvokable)) { dlog.error(varNode.pos, DiagnosticErrorCode.INVALID_WORKER_DECLARATION_IN_ISOLATED_FUNCTION); } } } @Override public void visit(BLangIdentifier identifierNode) { } @Override public void visit(BLangAnnotation annotationNode) { } @Override public void visit(BLangAnnotationAttachment annAttachmentNode) { BLangExpression expr = annAttachmentNode.expr; if (expr != null) { analyzeNode(expr, env); } } @Override public void visit(BLangBlockStmt blockNode) { SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env); for (BLangStatement statement : blockNode.stmts) { analyzeNode(statement, blockEnv); } } @Override public void visit(BLangSimpleVariableDef varDefNode) { BLangVariable var = varDefNode.var; if (var.expr == null) { if (var.typeNode != null) { analyzeNode(var.typeNode, env); } return; } analyzeNode(var, env); } @Override public void visit(BLangAssignment assignNode) { BLangExpression varRef = assignNode.varRef; analyzeNode(varRef, env); BLangExpression expr = assignNode.expr; analyzeNode(expr, env); BLangInvokableNode enclInvokable = env.enclInvokable; if (varRef.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { BLangFieldBasedAccess fieldAccess = (BLangFieldBasedAccess) varRef; if (enclInvokable != null && enclInvokable.getKind() == NodeKind.FUNCTION && ((BLangFunction) enclInvokable).objInitFunction && isIsolatedObjectFieldOrMethodAccessViaSelf(fieldAccess, false)) { validateIsolatedExpression( ((BObjectType) enclInvokable.symbol.owner.type).fields.get(fieldAccess.field.value).type, expr); } } validateTransferOut(varRef, expr); } @Override public void visit(BLangCompoundAssignment compoundAssignNode) { analyzeNode(compoundAssignNode.varRef, env); analyzeNode(compoundAssignNode.expr, env); } @Override public void visit(BLangRetry retryNode) { analyzeNode(retryNode.retrySpec, env); analyzeNode(retryNode.retryBody, env); } @Override public 
void visit(BLangRetryTransaction retryTransaction) { analyzeNode(retryTransaction.retrySpec, env); analyzeNode(retryTransaction.transaction, env); } @Override public void visit(BLangRetrySpec retrySpec) { for (BLangExpression argExpr : retrySpec.argExprs) { analyzeNode(argExpr, env); } } @Override public void visit(BLangContinue continueNode) { } @Override public void visit(BLangBreak breakNode) { } @Override public void visit(BLangReturn returnNode) { BLangExpression expr = returnNode.expr; analyzeNode(expr, env); if (!this.inLockStatement) { return; } validateTransferOut(expr, this.copyInLockInfoStack.peek().nonIsolatedTransferOutExpressions); } @Override public void visit(BLangPanic panicNode) { analyzeNode(panicNode.expr, env); } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { analyzeNode(xmlnsStmtNode.xmlnsDecl, env); } @Override public void visit(BLangExpressionStmt exprStmtNode) { analyzeNode(exprStmtNode.expr, env); } @Override public void visit(BLangIf ifNode) { analyzeNode(ifNode.expr, env); analyzeNode(ifNode.body, env); analyzeNode(ifNode.elseStmt, env); } @Override public void visit(BLangQueryAction queryAction) { for (BLangNode clause : queryAction.getQueryClauses()) { analyzeNode(clause, env); } analyzeNode(queryAction.doClause, env); } @Override public void visit(BLangMatch matchNode) { analyzeNode(matchNode.expr, env); for (BLangMatch.BLangMatchBindingPatternClause patternClause : matchNode.patternClauses) { analyzeNode(patternClause, env); } } @Override public void visit(BLangMatch.BLangMatchTypedBindingPatternClause patternClauseNode) { analyzeNode(patternClauseNode.variable, env); analyzeNode(patternClauseNode.body, env); } @Override public void visit(BLangMatchStatement matchStatement) { analyzeNode(matchStatement.expr, env); for (BLangMatchClause matchClause : matchStatement.matchClauses) { analyzeNode(matchClause, env); } if (matchStatement.onFailClause != null) { analyzeNode(matchStatement.onFailClause, env); } } @Override 
public void visit(BLangMatchGuard matchGuard) { analyzeNode(matchGuard.expr, env); } @Override public void visit(BLangConstPattern constMatchPattern) { analyzeNode(constMatchPattern.expr, env); } @Override public void visit(BLangWildCardMatchPattern wildCardMatchPattern) { } @Override public void visit(BLangListMatchPattern listMatchPattern) { for (BLangMatchPattern matchPattern : listMatchPattern.matchPatterns) { analyzeNode(matchPattern, env); } if (listMatchPattern.restMatchPattern != null) { analyzeNode(listMatchPattern.restMatchPattern, env); } } @Override public void visit(BLangRestMatchPattern restMatchPattern) { } @Override public void visit(BLangMappingMatchPattern mappingMatchPattern) { for (BLangFieldMatchPattern fieldMatchPattern : mappingMatchPattern.fieldMatchPatterns) { analyzeNode(fieldMatchPattern, env); } } @Override public void visit(BLangFieldMatchPattern fieldMatchPattern) { analyzeNode(fieldMatchPattern.fieldName, env); analyzeNode(fieldMatchPattern.matchPattern, env); } @Override public void visit(BLangWildCardBindingPattern wildCardBindingPattern) { } @Override public void visit(BLangVarBindingPatternMatchPattern varBindingPattern) { analyzeNode(varBindingPattern.getBindingPattern(), env); } @Override public void visit(BLangCaptureBindingPattern captureBindingPattern) { } @Override public void visit(BLangErrorBindingPattern errorBindingPattern) { analyzeNode(errorBindingPattern.errorMessageBindingPattern, env); analyzeNode(errorBindingPattern.errorCauseBindingPattern, env); analyzeNode(errorBindingPattern.errorFieldBindingPatterns, env); } @Override public void visit(BLangErrorMessageBindingPattern errorMessageBindingPattern) { analyzeNode(errorMessageBindingPattern.simpleBindingPattern, env); } @Override public void visit(BLangSimpleBindingPattern simpleBindingPattern) { analyzeNode(simpleBindingPattern.wildCardBindingPattern, env); analyzeNode(simpleBindingPattern.captureBindingPattern, env); } @Override public void 
visit(BLangErrorCauseBindingPattern errorCauseBindingPattern) { analyzeNode(errorCauseBindingPattern.simpleBindingPattern, env); analyzeNode(errorCauseBindingPattern.errorBindingPattern, env); } @Override public void visit(BLangErrorFieldBindingPatterns errorFieldBindingPatterns) { for (BLangNamedArgBindingPattern namedArgBindingPattern : errorFieldBindingPatterns.namedArgBindingPatterns) { analyzeNode(namedArgBindingPattern, env); } analyzeNode(errorFieldBindingPatterns.restBindingPattern, env); } @Override public void visit(BLangNamedArgBindingPattern namedArgBindingPattern) { analyzeNode(namedArgBindingPattern.argName, env); analyzeNode(namedArgBindingPattern.bindingPattern, env); } @Override public void visit(BLangErrorMatchPattern errorMatchPattern) { analyzeNode(errorMatchPattern.errorMessageMatchPattern, env); analyzeNode(errorMatchPattern.errorCauseMatchPattern, env); analyzeNode(errorMatchPattern.errorFieldMatchPatterns, env); } @Override public void visit(BLangErrorMessageMatchPattern errorMessageMatchPattern) { analyzeNode(errorMessageMatchPattern.simpleMatchPattern, env); } @Override public void visit(BLangSimpleMatchPattern simpleMatchPattern) { analyzeNode(simpleMatchPattern.wildCardMatchPattern, env); analyzeNode(simpleMatchPattern.constPattern, env); analyzeNode(simpleMatchPattern.varVariableName, env); } @Override public void visit(BLangErrorCauseMatchPattern errorCauseMatchPattern) { analyzeNode(errorCauseMatchPattern.simpleMatchPattern, env); analyzeNode(errorCauseMatchPattern.errorMatchPattern, env); } @Override public void visit(BLangErrorFieldMatchPatterns errorFieldMatchPatterns) { for (BLangNamedArgMatchPattern namedArgMatchPattern : errorFieldMatchPatterns.namedArgMatchPatterns) { analyzeNode(namedArgMatchPattern, env); } analyzeNode(errorFieldMatchPatterns.restMatchPattern, env); } @Override public void visit(BLangNamedArgMatchPattern namedArgMatchPattern) { analyzeNode(namedArgMatchPattern.argName, env); 
analyzeNode(namedArgMatchPattern.matchPattern, env); } @Override public void visit(BLangListBindingPattern listBindingPattern) { for (BLangBindingPattern bindingPattern : listBindingPattern.bindingPatterns) { analyzeNode(bindingPattern, env); } } @Override public void visit(BLangRestBindingPattern restBindingPattern) { } @Override public void visit(BLangMappingBindingPattern mappingBindingPattern) { for (BLangFieldBindingPattern fieldBindingPattern : mappingBindingPattern.fieldBindingPatterns) { analyzeNode(fieldBindingPattern, env); } } @Override public void visit(BLangFieldBindingPattern fieldBindingPattern) { analyzeNode(fieldBindingPattern.fieldName, env); analyzeNode(fieldBindingPattern.bindingPattern, env); } @Override public void visit(BLangMatchClause matchClause) { for (BLangMatchPattern matchPattern : matchClause.matchPatterns) { analyzeNode(matchPattern, env); } BLangMatchGuard matchGuard = matchClause.matchGuard; if (matchGuard != null) { analyzeNode(matchGuard, env); } analyzeNode(matchClause.blockStmt, env); } @Override public void visit(BLangForeach foreach) { analyzeNode(foreach.collection, env); analyzeNode(foreach.body, env); BLangOnFailClause onFailClause = foreach.onFailClause; if (onFailClause != null) { analyzeNode(onFailClause, env); } } @Override public void visit(BLangFromClause fromClause) { SymbolEnv fromEnv = fromClause.env; analyzeNode((BLangNode) fromClause.getVariableDefinitionNode(), fromEnv); analyzeNode(fromClause.collection, fromEnv); } @Override public void visit(BLangJoinClause joinClause) { SymbolEnv joinEnv = joinClause.env; analyzeNode((BLangNode) joinClause.getVariableDefinitionNode(), joinEnv); analyzeNode(joinClause.collection, joinEnv); analyzeNode((BLangNode) joinClause.onClause, joinEnv); } @Override public void visit(BLangLetClause letClause) { SymbolEnv letClauseEnv = letClause.env; for (BLangLetVariable letVarDeclaration : letClause.letVarDeclarations) { analyzeNode((BLangNode) letVarDeclaration.definitionNode, 
letClauseEnv); } } @Override public void visit(BLangOnClause onClause) { analyzeNode(onClause.lhsExpr, env); analyzeNode(onClause.rhsExpr, env); } @Override public void visit(BLangOrderKey orderKeyClause) { analyzeNode(orderKeyClause.expression, env); } @Override public void visit(BLangOrderByClause orderByClause) { SymbolEnv orderByEnv = orderByClause.env; for (OrderKeyNode orderKeyNode : orderByClause.orderByKeyList) { analyzeNode((BLangExpression) orderKeyNode.getOrderKey(), orderByEnv); } } @Override public void visit(BLangSelectClause selectClause) { analyzeNode(selectClause.expression, selectClause.env); } @Override public void visit(BLangWhereClause whereClause) { analyzeNode(whereClause.expression, whereClause.env); } @Override public void visit(BLangDoClause doClause) { analyzeNode(doClause.body, doClause.env); } @Override public void visit(BLangOnFailClause onFailClause) { analyzeNode(onFailClause.body, env); } @Override public void visit(BLangOnConflictClause onConflictClause) { analyzeNode(onConflictClause.expression, env); } @Override public void visit(BLangLimitClause limitClause) { analyzeNode(limitClause.expression, env); } @Override public void visit(BLangWhile whileNode) { analyzeNode(whileNode.expr, env); analyzeNode(whileNode.body, env); BLangOnFailClause onFailClause = whileNode.onFailClause; if (onFailClause != null) { analyzeNode(onFailClause, env); } } @Override public void visit(BLangLock lockNode) { boolean prevInLockStatement = this.inLockStatement; this.inLockStatement = true; copyInLockInfoStack.push(new LockInfo(lockNode)); analyzeNode(lockNode.body, SymbolEnv.createLockEnv(lockNode, env)); LockInfo copyInLockInfo = copyInLockInfoStack.pop(); this.inLockStatement = prevInLockStatement; BLangOnFailClause onFailClause = lockNode.onFailClause; if (onFailClause != null) { analyzeNode(onFailClause, env); } Map<BSymbol, List<BLangSimpleVarRef>> accessedRestrictedVars = copyInLockInfo.accessedRestrictedVars; Set<BSymbol> 
accessedRestrictedVarKeys = accessedRestrictedVars.keySet(); Set<BSymbol> accessedNonImmutableAndNonIsolatedVars = copyInLockInfo.accessedPotentiallyIsolatedVars; if (!accessedRestrictedVarKeys.isEmpty()) { if (accessedRestrictedVarKeys.size() > 1) { for (BSymbol accessedRestrictedVarKey : accessedRestrictedVarKeys) { for (BLangSimpleVarRef varRef : accessedRestrictedVars.get(accessedRestrictedVarKey)) { dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_USAGE_OF_MULTIPLE_RESTRICTED_VARS_IN_LOCK); } } } for (BLangSimpleVarRef varRef : copyInLockInfo.nonCaptureBindingPatternVarRefsOnLhs) { dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_ASSIGNMENT_IN_LOCK_WITH_RESTRICTED_VAR_USAGE); } for (BLangExpression expr : copyInLockInfo.nonIsolatedTransferInExpressions) { dlog.error(expr.pos, DiagnosticErrorCode.INVALID_TRANSFER_INTO_LOCK_WITH_RESTRICTED_VAR_USAGE); } for (BLangExpression expr : copyInLockInfo.nonIsolatedTransferOutExpressions) { dlog.error(expr.pos, DiagnosticErrorCode.INVALID_TRANSFER_OUT_OF_LOCK_WITH_RESTRICTED_VAR_USAGE); } for (BLangInvocation invocation : copyInLockInfo.nonIsolatedInvocations) { dlog.error(invocation.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_IN_LOCK_WITH_RESTRICTED_VAR_USAGE); } } if (copyInLockInfoStack.empty()) { return; } BLangLock lastCheckedLockNode = lockNode; for (int i = copyInLockInfoStack.size() - 1; i >= 0; i--) { LockInfo prevCopyInLockInfo = copyInLockInfoStack.get(i); BLangLock outerLockNode = prevCopyInLockInfo.lockNode; if (!isEnclosedLockWithinSameFunction(lastCheckedLockNode, outerLockNode)) { return; } lastCheckedLockNode = outerLockNode; Map<BSymbol, List<BLangSimpleVarRef>> prevLockAccessedRestrictedVars = prevCopyInLockInfo.accessedRestrictedVars; for (Map.Entry<BSymbol, List<BLangSimpleVarRef>> entry : accessedRestrictedVars.entrySet()) { BSymbol key = entry.getKey(); if (prevLockAccessedRestrictedVars.containsKey(key)) { prevLockAccessedRestrictedVars.get(key).addAll(entry.getValue()); continue; 
} prevLockAccessedRestrictedVars.put(key, entry.getValue()); } prevCopyInLockInfo.accessedPotentiallyIsolatedVars.addAll(accessedNonImmutableAndNonIsolatedVars); if (!accessedRestrictedVars.isEmpty()) { continue; } prevCopyInLockInfo.nonCaptureBindingPatternVarRefsOnLhs.addAll( copyInLockInfo.nonCaptureBindingPatternVarRefsOnLhs); prevCopyInLockInfo.nonIsolatedTransferInExpressions.addAll(copyInLockInfo.nonIsolatedTransferInExpressions); prevCopyInLockInfo.nonIsolatedTransferOutExpressions.addAll( copyInLockInfo.nonIsolatedTransferOutExpressions); prevCopyInLockInfo.nonIsolatedInvocations.addAll(copyInLockInfo.nonIsolatedInvocations); prevCopyInLockInfo.accessedPotentiallyIsolatedVars.addAll(copyInLockInfo.accessedPotentiallyIsolatedVars); } } @Override public void visit(BLangTransaction transactionNode) { analyzeNode(transactionNode.transactionBody, env); } @Override public void visit(BLangTupleDestructure stmt) { BLangTupleVarRef varRef = stmt.varRef; BLangExpression expr = stmt.expr; analyzeNode(varRef, env); analyzeNode(expr, env); validateTransferOut(varRef, expr); } @Override public void visit(BLangRecordDestructure stmt) { BLangRecordVarRef varRef = stmt.varRef; BLangExpression expr = stmt.expr; analyzeNode(varRef, env); analyzeNode(expr, env); validateTransferOut(varRef, expr); } @Override public void visit(BLangErrorDestructure stmt) { BLangErrorVarRef varRef = stmt.varRef; BLangExpression expr = stmt.expr; analyzeNode(varRef, env); analyzeNode(expr, env); validateTransferOut(varRef, expr); } @Override public void visit(BLangForkJoin forkJoin) { markDependsOnIsolationNonInferableConstructs(); inferredIsolated = false; if (isInIsolatedFunction(env.enclInvokable)) { dlog.error(forkJoin.pos, DiagnosticErrorCode.INVALID_FORK_STATEMENT_IN_ISOLATED_FUNCTION); } } @Override public void visit(BLangWorkerSend workerSendNode) { } @Override public void visit(BLangWorkerReceive workerReceiveNode) { } @Override public void visit(BLangRollback rollbackNode) { 
analyzeNode(rollbackNode.expr, env); } @Override public void visit(BLangLiteral literalExpr) { } @Override public void visit(BLangConstRef constRef) { } @Override public void visit(BLangNumericLiteral literalExpr) { } @Override public void visit(BLangRecordLiteral recordLiteral) { for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValuePair = (BLangRecordLiteral.BLangRecordKeyValueField) field; if (keyValuePair.key.computedKey) { analyzeNode(keyValuePair.key.expr, env); } analyzeNode(keyValuePair.valueExpr, env); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { analyzeNode((BLangRecordLiteral.BLangRecordVarNameField) field, env); } else { analyzeNode(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env); } } } @Override public void visit(BLangTupleVarRef varRefExpr) { for (BLangExpression expression : varRefExpr.expressions) { analyzeNode(expression, env); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { analyzeNode(restParam, env); } } @Override public void visit(BLangRecordVarRef varRefExpr) { for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { analyzeNode(recordRefField.variableReference, env); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { analyzeNode(restParam, env); } } @Override public void visit(BLangErrorVarRef varRefExpr) { analyzeNode(varRefExpr.message, env); BLangVariableReference cause = varRefExpr.cause; if (cause != null) { analyzeNode(cause, env); } for (BLangNamedArgsExpression namedArgsExpression : varRefExpr.detail) { analyzeNode(namedArgsExpression, env); } BLangVariableReference restVar = varRefExpr.restVar; if (restVar != null) { analyzeNode(restVar, env); } BLangType typeNode = varRefExpr.typeNode; if (typeNode != null) { analyzeNode(typeNode, env); } } @Override public void 
visit(BLangSimpleVarRef varRefExpr) { BType accessType = varRefExpr.getBType(); BSymbol symbol = varRefExpr.symbol; BLangInvokableNode enclInvokable = env.enclInvokable; BLangType enclType = env.enclType; if (symbol == null) { return; } BLangNode parent = varRefExpr.parent; boolean isolatedModuleVariableReference = isIsolatedModuleVariableSymbol(symbol); boolean accessOfPotentiallyIsolatedVariable = false; boolean accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable = false; Set<BSymbol> inferableClasses = new HashSet<>(); if ((symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) { accessOfPotentiallyIsolatedVariable = this.isolationInferenceInfoMap.containsKey(symbol) && this.isolationInferenceInfoMap.get(symbol).getKind() != IsolationInferenceKind.FUNCTION; accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable = Symbols.isFlagOn(symbol.flags, Flags.FINAL) && !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(accessType) && isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(symbol.owner, accessType, inferableClasses); } if (inLockStatement) { LockInfo exprInfo = copyInLockInfoStack.peek(); if (isolatedModuleVariableReference || isMethodCallOnSelfInIsolatedObject(varRefExpr, parent)) { addToAccessedRestrictedVars(exprInfo.accessedRestrictedVars, varRefExpr); } if (parent == null && varRefExpr.isLValue) { if (!isSelfOfObject(varRefExpr) && isInvalidCopyIn(varRefExpr, env)) { exprInfo.nonCaptureBindingPatternVarRefsOnLhs.add(varRefExpr); } } else if ((!varRefExpr.isLValue || parent.getKind() != NodeKind.ASSIGNMENT) && !isIsolated(varRefExpr.symbol.flags) && !isSelfOfIsolatedObject(varRefExpr) && isInvalidCopyIn(varRefExpr, env)) { exprInfo.nonIsolatedTransferInExpressions.add(varRefExpr); } if (accessOfPotentiallyIsolatedVariable) { ((VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(symbol)).accessedLockInfo .add(exprInfo); exprInfo.accessedPotentiallyIsolatedVars.add(symbol); } } else if (accessOfPotentiallyIsolatedVariable || 
accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable) { VariableIsolationInferenceInfo inferenceInfo = (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(symbol); inferenceInfo.accessedOutsideLockStatement = true; inferenceInfo.accessOutsideLockStatementValidIfInferredIsolated = false; if (accessOfPotentiallyReadOnlyOrIsolatedObjectTypedFinalVariable) { inferenceInfo.dependsOnVariablesAndClasses.addAll(inferableClasses); } } boolean inIsolatedFunction = isInIsolatedFunction(enclInvokable); boolean recordFieldDefaultValue = isRecordFieldDefaultValue(enclType); boolean objectFieldDefaultValueRequiringIsolation = !recordFieldDefaultValue && isObjectFieldDefaultValueRequiringIsolation(env); SymbolEnv enclEnv = env.enclEnv; if (inIsolatedFunction) { if (enclInvokable == null) { BLangArrowFunction bLangArrowFunction = (BLangArrowFunction) enclEnv.node; for (BLangSimpleVariable param : bLangArrowFunction.params) { if (param.symbol == symbol) { return; } } } } if (!recordFieldDefaultValue && !objectFieldDefaultValueRequiringIsolation && enclInvokable != null && isReferenceToVarDefinedInSameInvokable(symbol.owner, enclInvokable.symbol)) { return; } long flags = symbol.flags; if (Symbols.isFlagOn(flags, Flags.CONSTANT)) { return; } if ((Symbols.isFlagOn(flags, Flags.FINAL) || Symbols.isFlagOn(flags, Flags.FUNCTION_FINAL)) && types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(accessType)) { return; } if (isDefinitionReference(symbol)) { return; } if (enclEnv != null && enclEnv.node != null && enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction bLangArrowFunction = (BLangArrowFunction) enclEnv.node; for (BLangSimpleVariable param : bLangArrowFunction.params) { if (param.symbol == symbol) { return; } } } if (isolatedModuleVariableReference) { if (!inLockStatement) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_ISOLATED_VARIABLE_ACCESS_OUTSIDE_LOCK); } return; } if (accessOfPotentiallyIsolatedVariable) { 
markDependentlyIsolatedOnVar(symbol); } else { markDependsOnIsolationNonInferableConstructs(); } inferredIsolated = false; if (inIsolatedFunction) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_ACCESS_IN_ISOLATED_FUNCTION); return; } if (recordFieldDefaultValue) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_ACCESS_AS_RECORD_DEFAULT); return; } if (objectFieldDefaultValueRequiringIsolation) { dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_MUTABLE_ACCESS_AS_OBJECT_DEFAULT); return; } if (isObjectFieldDefaultValue(env)) { BLangFunction initFunction = ((BLangClassDefinition) env.node).initFunction; if (initFunction != null) { markInitMethodDependentlyIsolatedOnVar(initFunction, symbol); } } } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; analyzeNode(expr, env); if (!isInvalidIsolatedObjectFieldOrMethodAccessViaSelfIfOutsideLock(fieldAccessExpr, true)) { BType bType = expr.getBType(); BTypeSymbol tsymbol = bType.tsymbol; BLangIdentifier field = fieldAccessExpr.field; if (!isPotentiallyProtectedFieldAccessedInNonInitMethod(expr, tsymbol, field)) { return; } if (inLockStatement) { LockInfo lockInfo = copyInLockInfoStack.peek(); ((VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol)).accessedLockInfo .add(lockInfo); lockInfo.accessedPotentiallyIsolatedVars.add(tsymbol); return; } VariableIsolationInferenceInfo inferenceInfo = (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol); inferenceInfo.accessedOutsideLockStatement = true; BType fieldType = fieldAccessExpr.getBType(); if (Symbols.isFlagOn(((BObjectType) bType).fields.get(field.value).symbol.flags, Flags.FINAL) && isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(env.enclPkg.symbol, fieldType)) { inferenceInfo.typesOfFinalFieldsAccessedOutsideLock.add(fieldType); } else { inferenceInfo.accessOutsideLockStatementValidIfInferredIsolated = false; } return; } if 
(inLockStatement) {
    // Inside a lock statement the access is permitted, but the variable is recorded as a
    // restricted access so copy-in/copy-out rules can be validated for this lock.
    addToAccessedRestrictedVars(copyInLockInfoStack.peek().accessedRestrictedVars, (BLangSimpleVarRef) expr);
    return;
}

// Outside a lock, accessing a mutable field of an isolated object via `self` is an error.
dlog.error(fieldAccessExpr.pos,
           DiagnosticErrorCode.INVALID_MUTABLE_FIELD_ACCESS_IN_ISOLATED_OBJECT_OUTSIDE_LOCK);
}

/**
 * Returns {@code true} if the access is of the form {@code self.field}, happens outside the
 * object's {@code init} method, and the field is tracked as a "protected" field for the
 * type's isolation inference.
 */
private boolean isPotentiallyProtectedFieldAccessedInNonInitMethod(BLangExpression expr, BTypeSymbol tsymbol,
                                                                   BLangIdentifier field) {
    return expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && isSelfOfObject((BLangSimpleVarRef) expr) &&
            this.isolationInferenceInfoMap.containsKey(tsymbol) && !inObjectInitMethod() &&
            ((ClassIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol))
                    .protectedFields.contains(field);
}

@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Analyze both the container expression and the index expression.
    analyzeNode(indexAccessExpr.expr, env);
    analyzeNode(indexAccessExpr.indexExpr, env);
}

@Override
public void visit(BLangInvocation invocationExpr) {
    analyzeInvocation(invocationExpr);
}

@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Analyze positional args first, then named args.
    for (BLangExpression positionalArg : errorConstructorExpr.positionalArgs) {
        analyzeNode(positionalArg, env);
    }
    for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
        analyzeNode(namedArgsExpression, env);
    }
}

@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocationExpr) {
    if (!actionInvocationExpr.async) {
        // A non-async action invocation is handled like a regular invocation.
        analyzeInvocation(actionInvocationExpr);
        return;
    }

    // An async (`start`) invocation prevents isolation inference for the enclosing construct.
    markDependsOnIsolationNonInferableConstructs();
    inferredIsolated = false;

    if (actionInvocationExpr.functionPointerInvocation) {
        return;
    }

    if (isInIsolatedFunction(env.enclInvokable)) {
        dlog.error(actionInvocationExpr.pos, DiagnosticErrorCode.INVALID_ASYNC_INVOCATION_IN_ISOLATED_FUNCTION);
    }
}

@Override
public void visit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol initInvocationSymbol = (BInvokableSymbol) typeInitExpr.initInvocation.symbol;
    if (initInvocationSymbol != null && !isIsolated(initInvocationSymbol.flags)) {
        // The invoked `init` is not explicitly isolated: attempt inference, then flag
        // contexts that require an isolated expression.
        analyzeFunctionForInference(initInvocationSymbol);

        inferredIsolated = false;

        if (isInIsolatedFunction(env.enclInvokable)) {
            dlog.error(typeInitExpr.pos,
                       DiagnosticErrorCode.INVALID_NON_ISOLATED_INIT_EXPRESSION_IN_ISOLATED_FUNCTION);
        } else if (isRecordFieldDefaultValue(env.enclType)) {
            dlog.error(typeInitExpr.pos,
                       DiagnosticErrorCode.INVALID_NON_ISOLATED_INIT_EXPRESSION_AS_RECORD_DEFAULT);
        } else if (isObjectFieldDefaultValueRequiringIsolation(env)) {
            dlog.error(typeInitExpr.pos,
                       DiagnosticErrorCode.INVALID_NON_ISOLATED_INIT_EXPRESSION_AS_OBJECT_DEFAULT);
        } else if (isObjectFieldDefaultValue(env)) {
            // Object field default where the class `init` may yet be inferred isolated:
            // record the dependency instead of logging an error now.
            BLangFunction initFunction = ((BLangClassDefinition) env.node).initFunction;
            if (initFunction != null) {
                markInitMethodDependentlyIsolatedOnFunction(initFunction, initInvocationSymbol);
            }
        }
    }

    for (BLangExpression expression : typeInitExpr.argsExpr) {
        analyzeNode(expression, env);
    }
}

@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    analyzeNode(ternaryExpr.expr, env);
    analyzeNode(ternaryExpr.thenExpr, env);
    analyzeNode(ternaryExpr.elseExpr, env);
}

@Override
public void visit(BLangWaitExpr waitExpr) {
    for (BLangExpression expression : waitExpr.exprList) {
        analyzeNode(expression, env);
    }
}

@Override
public void visit(BLangTrapExpr trapExpr) {
    analyzeNode(trapExpr.expr, env);
}

@Override
public void visit(BLangBinaryExpr binaryExpr) {
    analyzeNode(binaryExpr.lhsExpr, env);
    analyzeNode(binaryExpr.rhsExpr, env);
}

@Override
public void visit(BLangElvisExpr elvisExpr) {
    analyzeNode(elvisExpr.lhsExpr, env);
    analyzeNode(elvisExpr.rhsExpr, env);
}

@Override
public void visit(BLangGroupExpr groupExpr) {
    analyzeNode(groupExpr.expression, env);
}

@Override
public void visit(BLangLetExpression letExpr) {
    // New scope for tracking let vars initialized with isolated expressions.
    isolatedLetVarStack.push(new HashSet<>());

    for (BLangLetVariable letVarDeclaration : letExpr.letVarDeclarations) {
        analyzeNode((BLangNode) letVarDeclaration.definitionNode, env);
    }

    analyzeNode(letExpr.expr, env);
    isolatedLetVarStack.pop();
}

@Override
public void
visit(BLangLetVariable letVariable) {
    analyzeNode((BLangNode) letVariable.definitionNode.getVariable(), env);
}

// The following visitors are traversal boilerplate: they recursively analyze child
// nodes; empty bodies indicate nodes with nothing relevant to isolation analysis.

@Override
public void visit(BLangListConstructorExpr listConstructorExpr) {
    for (BLangExpression expr : listConstructorExpr.exprs) {
        analyzeNode(expr, env);
    }
}

@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
        analyzeNode(recordLiteral, env);
    }
}

@Override
public void visit(BLangUnaryExpr unaryExpr) {
    analyzeNode(unaryExpr.expr, env);
}

@Override
public void visit(BLangTypedescExpr typedescExpr) {
    analyzeNode(typedescExpr.typeNode, env);
}

@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeNode(conversionExpr.typeNode, env);
    analyzeNode(conversionExpr.expr, env);
}

@Override
public void visit(BLangXMLQName xmlQName) {
}

@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    analyzeNode(xmlAttribute.name, env);
    analyzeNode(xmlAttribute.value, env);
}

@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    for (BLangExpression child : xmlElementLiteral.children) {
        analyzeNode(child, env);
    }
    for (BLangXMLAttribute attribute : xmlElementLiteral.attributes) {
        analyzeNode(attribute, env);
    }
    for (BLangXMLNS inlineNamespace : xmlElementLiteral.inlineNamespaces) {
        analyzeNode(inlineNamespace, env);
    }
}

@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression expr : xmlSequenceLiteral.xmlItems) {
        analyzeNode(expr, env);
    }
}

@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    for (BLangExpression expr : xmlTextLiteral.textFragments) {
        analyzeNode(expr, env);
    }
}

@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    for (BLangExpression textFragment : xmlCommentLiteral.textFragments) {
        analyzeNode(textFragment, env);
    }
}

@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    for (BLangExpression dataFragment : xmlProcInsLiteral.dataFragments) {
        analyzeNode(dataFragment, env);
    }
}

@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    for (BLangExpression textFragment : xmlQuotedString.textFragments) {
        analyzeNode(textFragment, env);
    }
}

@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    for (BLangExpression expr : stringTemplateLiteral.exprs) {
        analyzeNode(expr, env);
    }
}

@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    for (BLangExpression insertion : rawTemplateLiteral.insertions) {
        analyzeNode(insertion, env);
    }
}

@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
}

@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    // Arrow functions have no named symbol of their own; a temporary symbol is created
    // so inference can attach information to it.
    createTempSymbolIfNonExistent(bLangArrowFunction);
    analyzeNode(bLangArrowFunction.body, arrowFunctionEnv);
}

@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    analyzeNode(intRangeExpression.startExpr, env);
    analyzeNode(intRangeExpression.endExpr, env);
}

@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    analyzeNode(bLangVarArgsExpression.expr, env);
}

@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    analyzeNode(bLangNamedArgsExpression.expr, env);
}

@Override
public void visit(BLangCheckedExpr checkedExpr) {
    analyzeNode(checkedExpr.expr, env);
}

@Override
public void visit(BLangDo doNode) {
    analyzeNode(doNode.body, env);

    if (doNode.onFailClause != null) {
        analyzeNode(doNode.onFailClause, env);
    }
}

@Override
public void visit(BLangFail failExpr) {
    analyzeNode(failExpr.expr, env);
}

@Override
public void visit(BLangCheckPanickedExpr checkPanickedExpr) {
    analyzeNode(checkPanickedExpr.expr, env);
}

@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    analyzeNode(serviceConstructorExpr.serviceNode, env);
}

@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    analyzeNode(typeTestExpr.expr, env);
    analyzeNode(typeTestExpr.typeNode, env);
}

@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
}

@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    analyzeNode(annotAccessExpr.expr, env);
}

@Override
public void visit(BLangQueryExpr queryExpr) {
    for (BLangNode clause : queryExpr.getQueryClauses()) {
        analyzeNode(clause, env);
    }
}

@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    for (BLangExpression value : tableMultiKeyExpr.multiKeyIndexExprs) {
        analyzeNode(value, env);
    }
}

@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
}

@Override
public void visit(BLangCommitExpr commitExpr) {
}

@Override
public void visit(BLangValueType valueType) {
}

@Override
public void visit(BLangArrayType arrayType) {
    analyzeNode(arrayType.getElementType(), env);
}

@Override
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
}

@Override
public void visit(BLangConstrainedType constrainedType) {
    analyzeNode(constrainedType.constraint, env);
}

@Override
public void visit(BLangStreamType streamType) {
    analyzeNode(streamType.constraint, env);
    analyzeNode(streamType.error, env);
}

@Override
public void visit(BLangTableTypeNode tableType) {
    analyzeNode(tableType.constraint, env);

    if (tableType.tableKeyTypeConstraint != null) {
        analyzeNode(tableType.tableKeyTypeConstraint.keyType, env);
    }
}

@Override
public void visit(BLangUserDefinedType userDefinedType) {
}

@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    for (BLangVariable param : functionTypeNode.params) {
        analyzeNode(param.typeNode, env);
    }
    if (functionTypeNode.restParam != null) {
        analyzeNode(functionTypeNode.restParam.typeNode, env);
    }
    analyzeNode(functionTypeNode.returnTypeNode, env);
}

@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    for (BLangType memberTypeNode : unionTypeNode.memberTypeNodes) {
        analyzeNode(memberTypeNode, env);
    }
}

@Override
public void
visit(BLangIntersectionTypeNode intersectionTypeNode) {
    for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {
        analyzeNode(constituentTypeNode, env);
    }
}

@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Fields, included fields, init, and member functions are analyzed in the type's env.
    SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);

    for (BLangSimpleVariable field : objectTypeNode.fields) {
        analyzeNode(field, objectEnv);
    }

    for (BLangSimpleVariable referencedField : objectTypeNode.includedFields) {
        analyzeNode(referencedField, objectEnv);
    }

    BLangFunction initFunction = objectTypeNode.initFunction;
    if (initFunction != null) {
        analyzeNode(initFunction, objectEnv);
    }

    for (BLangFunction function : objectTypeNode.functions) {
        analyzeNode(function, objectEnv);
    }
}

@Override
public void visit(BLangClassDefinition classDefinition) {
    // Mirrors the object-type visitor, but in a class env.
    SymbolEnv classEnv = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, env);

    for (BLangSimpleVariable bLangSimpleVariable : classDefinition.fields) {
        analyzeNode(bLangSimpleVariable, classEnv);
    }

    for (BLangSimpleVariable field : classDefinition.referencedFields) {
        analyzeNode(field, classEnv);
    }

    BLangFunction initFunction = classDefinition.initFunction;
    if (initFunction != null) {
        analyzeNode(initFunction, classEnv);
    }

    for (BLangFunction function : classDefinition.functions) {
        analyzeNode(function, classEnv);
    }
}

@Override
public void visit(BLangObjectConstructorExpression objectConstructorExpression) {
    // Delegates to the type-init visitor of the desugared `new` expression.
    visit(objectConstructorExpression.typeInit);
}

@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
}

@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    SymbolEnv typeEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);

    for (BLangSimpleVariable field : recordTypeNode.fields) {
        analyzeNode(field, typeEnv);
    }

    for (BLangSimpleVariable referencedField : recordTypeNode.includedFields) {
        analyzeNode(referencedField, typeEnv);
    }

    BLangType restFieldType = recordTypeNode.restFieldType;
    if (restFieldType != null) {
        analyzeNode(restFieldType, typeEnv);
    }
}

@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    for (BLangExpression expression : finiteTypeNode.valueSpace) {
        analyzeNode(expression, env);
    }
}

@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
    for (BLangType memberTypeNode : tupleTypeNode.memberTypeNodes) {
        analyzeNode(memberTypeNode, env);
    }
    analyzeNode(tupleTypeNode.restParamType, env);
}

@Override
public void visit(BLangErrorType errorTypeNode) {
    analyzeNode(errorTypeNode.detailType, env);
}

@Override
public void visit(BLangTupleVariable bLangTupleVariable) {
    analyzeNode(bLangTupleVariable.typeNode, env);

    BLangExpression expr = bLangTupleVariable.expr;
    if (expr != null) {
        analyzeNode(expr, env);
    }
}

@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef) {
    analyzeNode(bLangTupleVariableDef.var, env);
}

@Override
public void visit(BLangRecordVariable bLangRecordVariable) {
    analyzeNode(bLangRecordVariable.typeNode, env);

    BLangExpression expr = bLangRecordVariable.expr;
    if (expr != null) {
        analyzeNode(expr, env);
    }
}

@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef) {
    analyzeNode(bLangRecordVariableDef.var, env);
}

@Override
public void visit(BLangErrorVariable bLangErrorVariable) {
    analyzeNode(bLangErrorVariable.typeNode, env);
    analyzeNode(bLangErrorVariable.expr, env);

    for (BLangErrorVariable.BLangErrorDetailEntry bLangErrorDetailEntry : bLangErrorVariable.detail) {
        analyzeNode(bLangErrorDetailEntry.valueBindingPattern, env);
    }
}

@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef) {
    analyzeNode(bLangErrorVariableDef.errorVariable, env);
}

@Override
public void visit(BLangMatch.BLangMatchStaticBindingPatternClause matchStaticBindingPatternClause) {
    analyzeNode(matchStaticBindingPatternClause.body, env);
}

@Override
public void visit(BLangMatch.BLangMatchStructuredBindingPatternClause matchStmtStructuredBindingPatternClause) {
    analyzeNode(matchStmtStructuredBindingPatternClause.bindingPatternVariable, env);

    BLangExpression typeGuardExpr = matchStmtStructuredBindingPatternClause.typeGuardExpr;
    if (typeGuardExpr != null) {
        analyzeNode(typeGuardExpr, env);
    }

    analyzeNode(matchStmtStructuredBindingPatternClause.body, env);
}

@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
}

@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
}

@Override
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : waitForAllExpr.keyValuePairs) {
        analyzeNode(keyValuePair, env);
    }
}

@Override
public void visit(BLangWaitForAllExpr.BLangWaitKeyValue waitKeyValue) {
    BLangExpression keyExpr = waitKeyValue.keyExpr;
    if (keyExpr != null) {
        analyzeNode(keyExpr, env);
    }

    BLangExpression valueExpr = waitKeyValue.valueExpr;
    if (valueExpr != null) {
        analyzeNode(valueExpr, env);
    }
}

@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
}

@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    BLangExpression childIndex = xmlNavigation.childIndex;
    if (childIndex != null) {
        analyzeNode(childIndex, env);
    }
}

/**
 * Core invocation analysis: analyzes the invoked expression and arguments, and decides
 * whether the call is compatible with the isolation expectations of the enclosing context.
 */
private void analyzeInvocation(BLangInvocation invocationExpr) {
    List<BLangExpression> requiredArgs = invocationExpr.requiredArgs;
    List<BLangExpression> restArgs = invocationExpr.restArgs;

    BLangExpression expr = invocationExpr.expr;
    // Avoid analyzing the attached expression twice when it also appears as the first
    // required arg (method-call desugaring).
    if (expr != null && (requiredArgs.isEmpty() || requiredArgs.get(0) != expr)) {
        analyzeNode(expr, env);
    }

    BInvokableSymbol symbol = (BInvokableSymbol) invocationExpr.symbol;
    if (symbol == null) {
        analyzeArgs(requiredArgs, restArgs);
        return;
    }

    boolean inIsolatedFunction = isInIsolatedFunction(env.enclInvokable);
    boolean recordFieldDefaultValue = isRecordFieldDefaultValue(env.enclType);
    boolean objectFieldDefaultValueRequiringIsolation = isObjectFieldDefaultValueRequiringIsolation(env);
    boolean expectsIsolation = inIsolatedFunction ||
recordFieldDefaultValue || objectFieldDefaultValueRequiringIsolation;

boolean isolatedFunctionCall = isIsolated(symbol.type.flags);

if (isolatedFunctionCall) {
    // Isolated callee: only the arguments for `isolated` params need special handling.
    analyzeArgIsolatedness(invocationExpr, requiredArgs, restArgs, symbol, expectsIsolation);
    return;
}

analyzeArgs(requiredArgs, restArgs);

if (inLockStatement) {
    copyInLockInfoStack.peek().nonIsolatedInvocations.add(invocationExpr);
}

long flags = symbol.flags;
if (Symbols.isFlagOn(flags, Flags.ISOLATED_PARAM)) {
    return;
}

// Non-isolated callee: try to infer its isolation; the current construct cannot be
// considered isolated for now.
analyzeFunctionForInference(symbol);

inferredIsolated = false;

if (inIsolatedFunction) {
    dlog.error(invocationExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_IN_ISOLATED_FUNCTION);
    return;
}

if (recordFieldDefaultValue) {
    // NOTE(review): unlike the isolated-function branch above, this branch does not
    // `return;`, so the object-default checks below may also run — presumably
    // intentional, but worth confirming.
    dlog.error(invocationExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_AS_RECORD_DEFAULT);
}

if (objectFieldDefaultValueRequiringIsolation) {
    dlog.error(invocationExpr.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_INVOCATION_AS_OBJECT_DEFAULT);
} else if (isObjectFieldDefaultValue(env)) {
    // Class `init` may still be inferred isolated: record the dependency.
    BLangFunction initFunction = ((BLangClassDefinition) env.node).initFunction;
    if (initFunction != null) {
        markInitMethodDependentlyIsolatedOnFunction(initFunction, symbol);
    }
}
}

// Analyzes all required and rest arguments in order.
private void analyzeArgs(List<BLangExpression> requiredArgs, List<BLangExpression> restArgs) {
    List<BLangExpression> args = new ArrayList<>(requiredArgs);
    args.addAll(restArgs);
    for (BLangExpression argExpr : args) {
        analyzeNode(argExpr, env);
    }
}

/**
 * For an argument passed to an `isolated` param: if the arg is an arrow function whose body
 * is inferred isolated, rewrites its type to an `isolated` function type (via a duplicated
 * type/symbol, so the original is untouched).
 */
private void analyzeAndSetArrowFuncFlagForIsolatedParamArg(BLangExpression arg) {
    if (arg.getKind() == NodeKind.REST_ARGS_EXPR) {
        BLangExpression expr = ((BLangRestArgsExpression) arg).expr;
        if (expr.getKind() != NodeKind.LIST_CONSTRUCTOR_EXPR) {
            analyzeNode(arg, env);
            return;
        }

        // Spread of a list constructor: recurse into each member expression.
        for (BLangExpression expression : ((BLangListConstructorExpr) expr).exprs) {
            analyzeAndSetArrowFuncFlagForIsolatedParamArg(expression);
        }
        return;
    }

    boolean namedArg = arg.getKind() == NodeKind.NAMED_ARGS_EXPR;
    BLangExpression argExpr = namedArg ? ((BLangNamedArgsExpression) arg).expr : arg;

    if (argExpr.getKind() != NodeKind.ARROW_EXPR) {
        analyzeNode(argExpr, env);
        return;
    }

    // Analyze the arrow function with a fresh `inferredIsolated` flag so we can tell
    // whether the arrow function itself is isolated.
    boolean prevInferredIsolatedness = this.inferredIsolated;
    this.inferredIsolated = true;

    analyzeNode(argExpr, env);

    if (this.inferredIsolated) {
        // Duplicate the invokable type/symbol with the ISOLATED flag set.
        BInvokableType invokableType = (BInvokableType) argExpr.getBType();
        BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol;

        BInvokableTypeSymbol dupInvokableTypeSymbol = new BInvokableTypeSymbol(tsymbol.tag,
                tsymbol.flags | Flags.ISOLATED, tsymbol.pkgID, null, tsymbol.owner, tsymbol.pos, tsymbol.origin);
        dupInvokableTypeSymbol.params = tsymbol.params == null ? null : new ArrayList<>(tsymbol.params);
        BInvokableType dupInvokableType = new BInvokableType(invokableType.paramTypes, invokableType.restType,
                                                            invokableType.retType, dupInvokableTypeSymbol);
        dupInvokableType.flags |= Flags.ISOLATED;
        dupInvokableTypeSymbol.type = dupInvokableType;
        argExpr.setBType(dupInvokableType);

        if (namedArg) {
            arg.setBType(dupInvokableType);
        }
    }

    // Restore: the enclosing construct is isolated only if it was before AND this arg is.
    this.inferredIsolated = prevInferredIsolatedness && this.inferredIsolated;
}

/**
 * Validates arguments passed for `isolated` params of an isolated callee, handling
 * positional args, named args, and rest/vararg combinations separately.
 */
private void analyzeArgIsolatedness(BLangInvocation invocationExpr, List<BLangExpression> requiredArgs,
                                    List<BLangExpression> restArgs, BInvokableSymbol symbol,
                                    boolean expectsIsolation) {
    List<BVarSymbol> params = symbol.params;
    int paramsCount = params.size();

    if (restArgs.isEmpty()) {
        // No rest args: walk positional args against params in order; named args are
        // matched to params by name.
        int nextParamIndex = 0;

        for (BLangExpression arg : requiredArgs) {
            if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
                // Positional argument.
                BVarSymbol varSymbol = params.get(nextParamIndex++);

                if (!Symbols.isFlagOn(varSymbol.flags, Flags.ISOLATED_PARAM)) {
                    analyzeNode(arg, env);
                    continue;
                }

                analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg);

                handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation,
                                                               arg.getBType(), arg.pos);
                continue;
            }

            // Named argument: find the matching param by name.
            String name = ((BLangNamedArgsExpression) arg).name.value;

            for (BVarSymbol param : params) {
                if (!param.name.value.equals(name)) {
                    continue;
                }

                if
(!Symbols.isFlagOn(param.flags, Flags.ISOLATED_PARAM)) {
                    analyzeNode(arg, env);
                    continue;
                }

                analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg);

                handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation,
                                                               arg.getBType(), arg.pos);
            }
        }
        return;
    }

    // Rest args present: first handle the required (positional) args.
    int reqArgCount = requiredArgs.size();
    for (int i = 0; i < reqArgCount; i++) {
        BLangExpression arg = requiredArgs.get(i);
        if (!Symbols.isFlagOn(params.get(i).flags, Flags.ISOLATED_PARAM)) {
            analyzeNode(arg, env);
            continue;
        }

        if (arg.getBType() == symTable.semanticError) {
            continue;
        }

        analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg);

        handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(),
                                                       arg.pos);
    }

    if (restArgs.get(restArgs.size() - 1).getKind() == NodeKind.REST_ARGS_EXPR) {
        // Last rest arg is a spread (`...x`) vararg.
        BLangRestArgsExpression varArg = (BLangRestArgsExpression) restArgs.get(restArgs.size() - 1);
        BType varArgType = varArg.getBType();
        Location varArgPos = varArg.pos;

        if (varArgType == symTable.semanticError) {
            return;
        }

        if (reqArgCount == paramsCount) {
            // Vararg maps entirely onto the rest param.
            if (!Symbols.isFlagOn(symbol.restParam.flags, Flags.ISOLATED_PARAM)) {
                analyzeNode(varArg, env);
                return;
            }

            analyzeAndSetArrowFuncFlagForIsolatedParamArg(varArg);

            analyzeVarArgIsolatedness(invocationExpr, varArg, varArgPos, expectsIsolation);
            return;
        }

        if (reqArgCount < paramsCount) {
            // Vararg members fill the remaining required params first, then the rest param.
            BTupleType tupleType = (BTupleType) varArgType;
            List<BType> memberTypes = tupleType.tupleTypes;

            BLangExpression varArgExpr = varArg.expr;
            boolean listConstrVarArg = varArgExpr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR;
            // When the vararg isn't a list constructor, member expressions can't be
            // inspected individually; only their types are checked.
            BLangListConstructorExpr listConstructorExpr =
                    listConstrVarArg ? (BLangListConstructorExpr) varArgExpr : null;

            if (!listConstrVarArg) {
                analyzeNode(varArg, env);
            }

            int tupleIndex = 0;
            for (int i = reqArgCount; i < paramsCount; i++) {
                if (!Symbols.isFlagOn(params.get(i).flags, Flags.ISOLATED_PARAM)) {
                    if (listConstrVarArg) {
                        analyzeNode(listConstructorExpr.exprs.get(tupleIndex), env);
                    }
                    tupleIndex++;
                    continue;
                }

                BType type = memberTypes.get(tupleIndex);
                BLangExpression arg = null;
                if (listConstrVarArg) {
                    arg = listConstructorExpr.exprs.get(tupleIndex);
                    analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg);
                    type = arg.getBType();
                }

                handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, type,
                                                               varArgPos);
                tupleIndex++;
            }

            BVarSymbol restParam = symbol.restParam;
            if (restParam == null) {
                return;
            }

            if (!Symbols.isFlagOn(restParam.flags, Flags.ISOLATED_PARAM)) {
                if (listConstructorExpr == null) {
                    return;
                }

                List<BLangExpression> exprs = listConstructorExpr.exprs;
                for (int i = tupleIndex; i < exprs.size(); i++) {
                    analyzeNode(exprs.get(i), env);
                }
                return;
            }

            // Remaining tuple members map onto the isolated rest param.
            int memberTypeCount = memberTypes.size();
            if (tupleIndex < memberTypeCount) {
                for (int i = tupleIndex; i < memberTypeCount; i++) {
                    BType type = memberTypes.get(i);
                    BLangExpression arg = null;
                    if (listConstrVarArg) {
                        arg = listConstructorExpr.exprs.get(i);
                        analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg);
                        type = arg.getBType();
                    }

                    handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, type,
                                                                   varArgPos);
                }
            }

            if (listConstrVarArg) {
                List<BLangExpression> exprs = listConstructorExpr.exprs;
                for (int i = tupleIndex; i < exprs.size(); i++) {
                    BLangExpression arg = exprs.get(i);
                    analyzeAndSetArrowFuncFlagForIsolatedParamArg(arg);
                    handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation,
                                                                   arg.getBType(), varArgPos);
                }
                return;
            }

            BType tupleRestType = tupleType.restType;
            if (tupleRestType == null) {
                return;
            }

            handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, tupleRestType,
                                                           varArgPos);
            return;
        }
    }

    if (!Symbols.isFlagOn(symbol.restParam.flags, Flags.ISOLATED_PARAM)) {
        for (BLangExpression restArg : restArgs) {
            analyzeNode(restArg, env);
        }
        return;
    }

    analyzeRestArgsForRestParam(invocationExpr, restArgs, symbol, expectsIsolation);
}

// Handles individually-listed rest args (and an optional trailing `...x` vararg)
// mapped onto the callee's rest param.
private void analyzeRestArgsForRestParam(BLangInvocation invocationExpr, List<BLangExpression> restArgs,
                                         BInvokableSymbol symbol, boolean expectsIsolation) {
    if (Symbols.isFlagOn(((BArrayType) symbol.restParam.type).eType.flags, Flags.ISOLATED)) {
        // Rest param element type is already isolated: plain analysis suffices.
        for (BLangExpression restArg : restArgs) {
            analyzeNode(restArg, env);
        }
        return;
    }

    for (BLangExpression restArg : restArgs) {
        analyzeAndSetArrowFuncFlagForIsolatedParamArg(restArg);
    }

    int size = restArgs.size();
    BLangExpression lastArg = restArgs.get(size - 1);

    boolean lastArgIsVarArg = lastArg.getKind() == NodeKind.REST_ARGS_EXPR;

    for (int i = 0; i < (lastArgIsVarArg ? size - 1 : size); i++) {
        BLangExpression arg = restArgs.get(i);
        handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, arg, expectsIsolation, arg.getBType(),
                                                       arg.pos);
    }

    if (lastArgIsVarArg) {
        analyzeVarArgIsolatedness(invocationExpr, (BLangRestArgsExpression) lastArg, lastArg.pos, expectsIsolation);
    }
}

// Validates a `...x` vararg mapped to an isolated rest param, whether it is a list
// constructor, an array-typed value, or a tuple-typed value.
private void analyzeVarArgIsolatedness(BLangInvocation invocationExpr, BLangRestArgsExpression restArgsExpression,
                                       Location pos, boolean expectsIsolation) {
    BLangExpression expr = restArgsExpression.expr;
    if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {
        for (BLangExpression expression : ((BLangListConstructorExpr) expr).exprs) {
            analyzeAndSetArrowFuncFlagForIsolatedParamArg(expression);

            handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, expression, expectsIsolation,
                                                           expression.getBType(), pos);
        }
        return;
    }

    BType varArgType = restArgsExpression.getBType();
    if (varArgType.tag == TypeTags.ARRAY) {
        handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation,
                                                       ((BArrayType) varArgType).eType, pos);
        return;
    }

    BTupleType tupleType = (BTupleType) varArgType;
for (BType type : tupleType.tupleTypes) {
        handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, type, pos);
    }

    BType restType = tupleType.restType;
    if (restType != null) {
        handleNonExplicitlyIsolatedArgForIsolatedParam(invocationExpr, null, expectsIsolation, restType, pos);
    }
}

/**
 * Handles an argument for an `isolated` param whose type is not explicitly isolated:
 * either logs an error (if the context requires isolation) or records inference
 * dependencies so the arg's function may later be inferred isolated.
 */
private void handleNonExplicitlyIsolatedArgForIsolatedParam(BLangInvocation invocationExpr, BLangExpression expr,
                                                            boolean expectsIsolation, BType type, Location pos) {
    if (Symbols.isFlagOn(type.flags, Flags.ISOLATED)) {
        return;
    }

    this.inferredIsolated = false;

    if (expectsIsolation) {
        dlog.error(pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_FUNCTION_AS_ARGUMENT);
        return;
    }

    if (expr != null) {
        NodeKind kind = expr.getKind();

        if (kind == NodeKind.LAMBDA) {
            markFunctionDependentlyIsolatedOnFunction(env.enclInvokable,
                                                      ((BLangLambdaFunction) expr).function.symbol);
        } else if (kind == NodeKind.ARROW_EXPR) {
            markFunctionDependentlyIsolatedOnFunction(env.enclInvokable,
                                                      createTempSymbolIfNonExistent((BLangArrowFunction) expr));
        }
    } else {
        // No expression to attach a dependency to — inference is not possible.
        markDependsOnIsolationNonInferableConstructs();
    }

    if (inLockStatement) {
        copyInLockInfoStack.peek().nonIsolatedInvocations.add(invocationExpr);
    }
}

// True if the enclosing invokable (or enclosing arrow function, when there is no
// invokable) is `isolated`.
private boolean isInIsolatedFunction(BLangInvokableNode enclInvokable) {
    if (enclInvokable == null) {
        if (isNotInArrowFunctionBody(env)) {
            return false;
        }
        return isIsolated(((BLangArrowFunction) env.enclEnv.node).funcType.flags);
    }

    return isIsolated(enclInvokable.symbol.flags);
}

// True if the current enclosing type node is a record type (i.e. we are analyzing a
// record field default value).
private boolean isRecordFieldDefaultValue(BLangType enclType) {
    if (enclType == null) {
        return false;
    }

    return enclType.getKind() == NodeKind.RECORD_TYPE;
}

// True for an object field default value whose class `init` is absent or isolated —
// i.e. the default expression itself must be isolated.
private boolean isObjectFieldDefaultValueRequiringIsolation(SymbolEnv env) {
    if (!isObjectFieldDefaultValue(env)) {
        return false;
    }

    BLangClassDefinition classDefinition = (BLangClassDefinition) env.node;

    BLangFunction initFunction = classDefinition.initFunction;
    if (initFunction == null) {
        return true;
    }

    return isIsolated(initFunction.symbol.flags);
}

private boolean isObjectFieldDefaultValue(SymbolEnv env) {
    return env.node.getKind() == NodeKind.CLASS_DEFN;
}

// References to type definitions and functions are not mutable-state accesses.
private boolean isDefinitionReference(BSymbol symbol) {
    return Symbols.isTagOn(symbol, SymTag.TYPE_DEF) || Symbols.isTagOn(symbol, SymTag.FUNCTION);
}

private boolean isIsolated(long flags) {
    return Symbols.isFlagOn(flags, Flags.ISOLATED);
}

private boolean isIsolatedClassField() {
    BLangNode node = env.node;
    return node.getKind() == NodeKind.CLASS_DEFN && ((BLangClassDefinition) node).flagSet.contains(Flag.ISOLATED);
}

// A field must be effectively private unless it is final and of a readonly/isolated-object
// subtype.
private boolean isExpectedToBeAPrivateField(BVarSymbol symbol, BType type) {
    return !Symbols.isFlagOn(symbol.flags, Flags.FINAL) || !types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type);
}

// True for `self.x` accesses within a method of an isolated object (optionally ignoring
// the `init` method).
private boolean isIsolatedObjectFieldOrMethodAccessViaSelf(BLangFieldBasedAccess fieldAccessExpr,
                                                           boolean ignoreInit) {
    BLangExpression expr = fieldAccessExpr.expr;

    if (expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }

    if (!isSelfOfObject((BLangSimpleVarRef) expr)) {
        return false;
    }

    return isInIsolatedObjectMethod(env, ignoreInit);
}

// A `self.x` access in an isolated object is invalid outside a lock when the field is
// expected to be (effectively) private.
private boolean isInvalidIsolatedObjectFieldOrMethodAccessViaSelfIfOutsideLock(
        BLangFieldBasedAccess fieldAccessExpr, boolean ignoreInit) {
    if (!isIsolatedObjectFieldOrMethodAccessViaSelf(fieldAccessExpr, ignoreInit)) {
        return false;
    }

    BField field = ((BObjectType) env.enclInvokable.symbol.owner.type).fields.get(fieldAccessExpr.field.value);

    if (field == null) {
        return false;
    }

    return isExpectedToBeAPrivateField(field.symbol, field.type);
}

// Validates that an expression is isolated, short-circuiting for types that are already
// subtypes of readonly/isolated-object.
private void validateIsolatedExpression(BType type, BLangExpression expression) {
    if (types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type)) {
        return;
    }

    validateIsolatedExpression(expression);
}

private void validateIsolatedExpression(BLangExpression expression) {
    isIsolatedExpression(expression, true, true, new ArrayList<>());
}

private boolean isIsolatedExpression(BLangExpression expression) {
    return isIsolatedExpression(expression, false, false, new ArrayList<>());
}

private
boolean isIsolatedExpression(BLangExpression expression, boolean logErrors, boolean visitRestOnError, List<BLangExpression> nonIsolatedLocations) { return isIsolatedExpression(expression, logErrors, visitRestOnError, nonIsolatedLocations, false, null, null, null); } private boolean isIsolatedExpression(BLangExpression expression, boolean logErrors, boolean visitRestOnError, List<BLangExpression> nonIsolatedExpressions, boolean inferring, Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, Set<BSymbol> unresolvedSymbols) { BType type = expression.getBType(); if (type != null && isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, inferring, type, unresolvedSymbols)) { return true; } switch (expression.getKind()) { case SIMPLE_VARIABLE_REF: if (isReferenceOfLetVarInitializedWithAnIsolatedExpression((BLangSimpleVarRef) expression)) { return true; } break; case LITERAL: case NUMERIC_LITERAL: return true; case LIST_CONSTRUCTOR_EXPR: for (BLangExpression expr : ((BLangListConstructorExpr) expression).exprs) { if (isIsolatedExpression(expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case TABLE_CONSTRUCTOR_EXPR: for (BLangRecordLiteral mappingConstr : ((BLangTableConstructorExpr) expression).recordLiteralList) { if (isIsolatedExpression(mappingConstr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case RECORD_LITERAL_EXPR: for (RecordLiteralNode.RecordField field : ((BLangRecordLiteral) expression).fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValueField = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangRecordLiteral.BLangRecordKey key = keyValueField.key; if (key.computedKey) { if (!isIsolatedExpression(key.expr, logErrors, visitRestOnError, nonIsolatedExpressions) && 
!logErrors && !visitRestOnError) { return false; } } if (isIsolatedExpression(keyValueField.valueExpr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { if (isIsolatedExpression(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (isIsolatedExpression((BLangRecordLiteral.BLangRecordVarNameField) field, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case XML_COMMENT_LITERAL: BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) expression; for (BLangExpression textFragment : commentLiteral.textFragments) { if (isIsolatedExpression(textFragment, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } BLangExpression commentLiteralConcatExpr = commentLiteral.concatExpr; if (commentLiteralConcatExpr == null) { return true; } return isIsolatedExpression(commentLiteralConcatExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case XML_TEXT_LITERAL: BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) expression; for (BLangExpression textFragment : textLiteral.textFragments) { if (isIsolatedExpression(textFragment, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } BLangExpression textLiteralConcatExpr = textLiteral.concatExpr; if (textLiteralConcatExpr == null) { return true; } return isIsolatedExpression(textLiteralConcatExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case XML_PI_LITERAL: BLangXMLProcInsLiteral procInsLiteral = (BLangXMLProcInsLiteral) expression; for (BLangExpression dataFragment : procInsLiteral.dataFragments) { if (isIsolatedExpression(dataFragment, 
logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } BLangExpression procInsLiteralConcatExpr = procInsLiteral.dataConcatExpr; if (procInsLiteralConcatExpr == null) { return true; } return isIsolatedExpression(procInsLiteralConcatExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case XML_ELEMENT_LITERAL: for (BLangExpression child : ((BLangXMLElementLiteral) expression).children) { if (isIsolatedExpression(child, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case XML_SEQUENCE_LITERAL: for (BLangExpression xmlItem : ((BLangXMLSequenceLiteral) expression).xmlItems) { if (isIsolatedExpression(xmlItem, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case RAW_TEMPLATE_LITERAL: for (BLangExpression insertion : ((BLangRawTemplateLiteral) expression).insertions) { if (isIsolatedExpression(insertion, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case STRING_TEMPLATE_LITERAL: for (BLangExpression expr : ((BLangStringTemplateLiteral) expression).exprs) { if (isIsolatedExpression(expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; case TYPE_CONVERSION_EXPR: return isIsolatedExpression(((BLangTypeConversionExpr) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case CHECK_EXPR: case CHECK_PANIC_EXPR: return isIsolatedExpression(((BLangCheckedExpr) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case TRAP_EXPR: return isIsolatedExpression(((BLangTrapExpr) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case TERNARY_EXPR: BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) expression; if 
(!isIsolatedExpression(ternaryExpr.expr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError) { return false; } if (!isIsolatedExpression(ternaryExpr.thenExpr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError) { return false; } return isIsolatedExpression(ternaryExpr.elseExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case ELVIS_EXPR: BLangElvisExpr elvisExpr = (BLangElvisExpr) expression; if (!isIsolatedExpression(elvisExpr.lhsExpr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError) { return false; } return isIsolatedExpression(elvisExpr.rhsExpr, logErrors, visitRestOnError, nonIsolatedExpressions); case LET_EXPR: return isIsolatedExpression(((BLangLetExpression) expression).expr, logErrors, visitRestOnError, nonIsolatedExpressions); case GROUP_EXPR: return isIsolatedExpression(((BLangGroupExpr) expression).expression, logErrors, visitRestOnError, nonIsolatedExpressions); case TYPE_INIT_EXPR: BLangTypeInit typeInitExpr = (BLangTypeInit) expression; if (typeInitExpr == null) { return true; } expression = typeInitExpr.initInvocation; break; case OBJECT_CTOR_EXPRESSION: var objectConstructorExpression = (BLangObjectConstructorExpression) expression; typeInitExpr = objectConstructorExpression.typeInit; if (typeInitExpr == null) { return true; } expression = typeInitExpr.initInvocation; break; } if (expression.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expression; if (isCloneOrCloneReadOnlyInvocation(invocation)) { return true; } BSymbol invocationSymbol = invocation.symbol; if (invocationSymbol == null) { List<BLangExpression> argExprs = invocation.argExprs; if (argExprs.isEmpty()) { return true; } return isIsolatedExpression(argExprs.get(0), logErrors, visitRestOnError, nonIsolatedExpressions); } else if (isIsolated(invocationSymbol.type.flags) || (inferring && 
this.isolationInferenceInfoMap.containsKey(invocationSymbol) && inferFunctionIsolation(invocationSymbol, this.isolationInferenceInfoMap.get(invocationSymbol), publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols))) { List<BLangExpression> requiredArgs = invocation.requiredArgs; BLangExpression calledOnExpr = invocation.expr; if (calledOnExpr != null && (requiredArgs.isEmpty() || calledOnExpr != requiredArgs.get(0)) && (!isIsolatedExpression(calledOnExpr, logErrors, visitRestOnError, nonIsolatedExpressions) && !logErrors && !visitRestOnError)) { return false; } for (BLangExpression requiredArg : requiredArgs) { if (requiredArg.getKind() == NodeKind.NAMED_ARGS_EXPR) { if (isIsolatedExpression(((BLangNamedArgsExpression) requiredArg).expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (isIsolatedExpression(requiredArg, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } for (BLangExpression restArg : invocation.restArgs) { if (restArg.getKind() == NodeKind.REST_ARGS_EXPR) { if (isIsolatedExpression(((BLangRestArgsExpression) restArg).expr, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } if (isIsolatedExpression(restArg, logErrors, visitRestOnError, nonIsolatedExpressions) || logErrors || visitRestOnError) { continue; } return false; } return true; } } if (logErrors) { dlog.error(expression.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_EXPRESSION_AS_INITIAL_VALUE); } else { nonIsolatedExpressions.add(expression); } return false; } private boolean isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, boolean inferring, BType type, Set<BSymbol> unresolvedSymbols) { if (types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type)) { return true; } if (!inferring) { return false; } 
BTypeSymbol tsymbol = type.tsymbol;
    int tag = type.tag;
    if (tag == TypeTags.OBJECT) {
        // An object type may still qualify if inference info was collected for its class.
        if (this.isolationInferenceInfoMap.containsKey(tsymbol)) {
            return inferVariableOrClassIsolation(publiclyExposedObjectTypes, classDefinitions, tsymbol,
                    (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get(tsymbol), true,
                    unresolvedSymbols);
        }
        return false;
    }
    if (tag != TypeTags.UNION) {
        return false;
    }
    // A union qualifies only if every member type qualifies.
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions,
                true, memberType, unresolvedSymbols)) {
            return false;
        }
    }
    return true;
}

// Expression kinds whose isolated-ness depends entirely on their sub-expressions (used to
// decide whether to recurse when validating transfers out of a lock).
private boolean isDependentlyIsolatedExpressionKind(BLangExpression expression) {
    switch (expression.getKind()) {
        case LIST_CONSTRUCTOR_EXPR:
        case TABLE_CONSTRUCTOR_EXPR:
        case RECORD_LITERAL_EXPR:
        case XML_COMMENT_LITERAL:
        case XML_TEXT_LITERAL:
        case XML_PI_LITERAL:
        case XML_ELEMENT_LITERAL:
        case XML_SEQUENCE_LITERAL:
        case RAW_TEMPLATE_LITERAL:
        case STRING_TEMPLATE_LITERAL:
        case TYPE_CONVERSION_EXPR:
        case CHECK_EXPR:
        case CHECK_PANIC_EXPR:
        case TRAP_EXPR:
        case TERNARY_EXPR:
        case ELVIS_EXPR:
            return true;
        case GROUP_EXPR:
            return isDependentlyIsolatedExpressionKind(((BLangGroupExpr) expression).expression);
    }
    return false;
}

// Whether the invocation is a lang.value `clone()` or `cloneReadOnly()` lang-lib call, which
// always produce values safe to transfer.
private boolean isCloneOrCloneReadOnlyInvocation(BLangInvocation invocation) {
    if (!invocation.langLibInvocation) {
        return false;
    }
    String methodName = invocation.symbol.name.value;
    return invocation.symbol.pkgID.name.value.equals(VALUE_LANG_LIB) &&
            (methodName.equals(CLONE_LANG_LIB_METHOD) || methodName.equals(CLONE_READONLY_LANG_LIB_METHOD));
}

private boolean isInvalidTransferIn(BLangSimpleVarRef expression) {
    return isInvalidTransferIn(expression, isSelfOfObject(expression));
}

/**
 * Whether using `expression` inside a lock is an invalid transfer (copy-in) of a value,
 * determined by walking up the parent expression chain.
 *
 * @param invokedOnSelf true if the chain started from a reference to `self`
 */
private boolean isInvalidTransferIn(BLangExpression expression, boolean invokedOnSelf) {
    BLangNode parent = expression.parent;
    NodeKind parentExprKind = parent.getKind();
    if (!(parent instanceof BLangExpression)) {
        // Reached a non-expression parent (e.g., a statement): the value escapes here, so it
        // must itself be an isolated expression.
        return !isIsolatedExpression(expression);
    }
    BLangExpression parentExpression = (BLangExpression) parent;
    if (parentExprKind != NodeKind.INVOCATION) {
        if (!isSelfReference(expression) && isIsolatedExpression(expression)) {
            return false;
        }
        // Keep climbing — the eventual consumer decides validity.
        return isInvalidTransferIn(parentExpression, invokedOnSelf);
    }
    BLangInvocation invocation = (BLangInvocation) parentExpression;
    BLangExpression calledOnExpr = invocation.expr;
    if (calledOnExpr == expression) {
        // `expression` is the receiver of the method call.
        if (isIsolatedExpression(expression)) {
            return false;
        }
        if (isCloneOrCloneReadOnlyInvocation(invocation)) {
            return false;
        }
        // A call returning nil on a non-self receiver cannot be a valid transfer.
        if (!invokedOnSelf && invocation.getBType().tag == TypeTags.NIL) {
            return true;
        }
        return isInvalidTransferIn(parentExpression, invokedOnSelf);
    }
    // `expression` is an argument — it must itself be isolated.
    return !isIsolatedExpression(expression);
}

// Records `expression` as a non-isolated copy-out if it (or, for dependently-isolated kinds,
// any of its sub-expressions) is not an isolated expression.
private void validateTransferOut(BLangExpression expression,
                                 List<BLangExpression> nonIsolatedCopyOutExpressions) {
    if (!isDependentlyIsolatedExpressionKind(expression)) {
        if (!isIsolatedExpression(expression)) {
            nonIsolatedCopyOutExpressions.add(expression);
        }
        return;
    }
    // visitRestOnError=true: collect every offending sub-expression.
    isIsolatedExpression(expression, false, true, nonIsolatedCopyOutExpressions);
}

// Validates a transfer out via assignment, but only when the assigned-to reference is
// defined outside the enclosing lock statement.
private void validateTransferOutViaAssignment(BLangExpression expression, BLangExpression varRef,
                                              List<BLangExpression> nonIsolatedCopyOutLocations) {
    if (!hasRefDefinedOutsideLock(varRef)) {
        return;
    }
    validateTransferOut(expression, nonIsolatedCopyOutLocations);
}

// Entry point used on assignments: only relevant within a lock statement.
private void validateTransferOut(BLangExpression varRef, BLangExpression expr) {
    if (!this.inLockStatement) {
        return;
    }
    validateTransferOutViaAssignment(expr, varRef,
            this.copyInLockInfoStack.peek().nonIsolatedTransferOutExpressions);
}

// Whether the expression is a simple reference to `self`.
private boolean isSelfReference(BLangExpression expression) {
    return expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF && isSelfOfObject((BLangSimpleVarRef) expression);
}

// Whether the var-ref is the `self` receiver of the enclosing method (matches the name and
// the receiver symbol of the invokable owner).
private boolean isSelfOfObject(BLangSimpleVarRef varRefExpr) {
    if (!Names.SELF.value.equals(varRefExpr.variableName.value)) {
        return false;
    }
    BSymbol symbol = varRefExpr.symbol;
    if (symbol == null) {
        return false;
    }
    BSymbol owner = symbol.owner;
    if (owner == null || ((owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE)) {
        return false;
    }
    return symbol == ((BInvokableSymbol) owner).receiverSymbol;
}

// Whether the var-ref is `self` of an object whose type is `isolated`.
private boolean isSelfOfIsolatedObject(BLangSimpleVarRef varRefExpr) {
    return isSelfOfObject(varRefExpr) && isIsolated(varRefExpr.symbol.type.flags);
}

// Whether any variable referenced by the (possibly destructuring) reference is defined
// outside the enclosing lock statement.
private boolean hasRefDefinedOutsideLock(BLangExpression variableReference) {
    switch (variableReference.getKind()) {
        case SIMPLE_VARIABLE_REF:
            BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) variableReference;
            return isDefinedOutsideLock(names.fromIdNode(simpleVarRef.variableName), simpleVarRef.symbol.tag,
                                        env);
        case RECORD_VARIABLE_REF:
            BLangRecordVarRef recordVarRef = (BLangRecordVarRef) variableReference;
            for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : recordVarRef.recordRefFields) {
                if (hasRefDefinedOutsideLock(recordRefField.variableReference)) {
                    return true;
                }
            }
            ExpressionNode recordRestParam = recordVarRef.restParam;
            return recordRestParam != null && hasRefDefinedOutsideLock((BLangExpression) recordRestParam);
        case TUPLE_VARIABLE_REF:
            BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) variableReference;
            for (BLangExpression expression : tupleVarRef.expressions) {
                if (hasRefDefinedOutsideLock(expression)) {
                    return true;
                }
            }
            ExpressionNode tupleRestParam = tupleVarRef.restParam;
            return tupleRestParam != null && hasRefDefinedOutsideLock((BLangExpression) tupleRestParam);
        case ERROR_VARIABLE_REF:
            BLangErrorVarRef errorVarRef = (BLangErrorVarRef) variableReference;
            BLangVariableReference message = errorVarRef.message;
            if (message != null && hasRefDefinedOutsideLock(message)) {
                return true;
            }
            BLangVariableReference cause = errorVarRef.cause;
            if (cause != null && hasRefDefinedOutsideLock(cause)) {
                return true;
            }
            for (BLangNamedArgsExpression namedArgsExpression : errorVarRef.detail) {
                if (hasRefDefinedOutsideLock(namedArgsExpression.expr)) {
                    return true;
                }
            }
            BLangVariableReference errorRestVar = errorVarRef.restVar;
            return errorRestVar != null && hasRefDefinedOutsideLock(errorRestVar);
    }
    return false;
}

// Walks enclosing scopes: true if `name` is not found before crossing a LOCK node, i.e.,
// the variable was defined outside the lock.
private boolean isDefinedOutsideLock(Name name, int symTag, SymbolEnv currentEnv) {
    if (Names.IGNORE == name ||
            symResolver.lookupSymbolInGivenScope(currentEnv, name, symTag) != symTable.notFoundSymbol) {
        return false;
    }
    if (currentEnv.node.getKind() == NodeKind.LOCK) {
        return true;
    }
    return isDefinedOutsideLock(name, symTag, currentEnv.enclEnv);
}

// Whether analysis is currently inside a method attached to an isolated object.
// `ignoreInit` excludes the object's `init` method.
private boolean isInIsolatedObjectMethod(SymbolEnv env, boolean ignoreInit) {
    BLangInvokableNode enclInvokable = env.enclInvokable;
    if (enclInvokable == null || (enclInvokable.getKind() != NodeKind.FUNCTION &&
            enclInvokable.getKind() != NodeKind.RESOURCE_FUNC)) {
        return false;
    }
    BLangFunction enclFunction = (BLangFunction) enclInvokable;
    if (!enclFunction.attachedFunction) {
        return false;
    }
    if (enclFunction.objInitFunction && ignoreInit) {
        return false;
    }
    BType ownerType = enclInvokable.symbol.owner.type;
    return ownerType.tag == TypeTags.OBJECT && isIsolated(ownerType.flags);
}

private boolean isInvalidCopyIn(BLangSimpleVarRef varRefExpr, SymbolEnv currentEnv) {
    return isInvalidCopyIn(varRefExpr, names.fromIdNode(varRefExpr.variableName), varRefExpr.symbol.tag,
                           currentEnv);
}

// Whether referring to `varRefExpr` inside a lock is an invalid copy-in: walks outward until
// the defining scope or a LOCK node is found, then defers to the transfer-in check.
private boolean isInvalidCopyIn(BLangSimpleVarRef varRefExpr, Name name, int symTag, SymbolEnv currentEnv) {
    BSymbol symbol = symResolver.lookupSymbolInGivenScope(currentEnv, name, symTag);
    if (symbol != symTable.notFoundSymbol &&
            (!(symbol instanceof BVarSymbol) || ((BVarSymbol) symbol).originalSymbol == null)) {
        // Defined in this scope (and not a re-binding of an outer variable) — not a copy-in.
        return false;
    }
    if (currentEnv.node.getKind() == NodeKind.LOCK) {
        if (varRefExpr.parent == null) {
            return true;
        }
        return isInvalidTransferIn(varRefExpr);
    }
    return isInvalidCopyIn(varRefExpr, name, symTag, currentEnv.enclEnv);
}

// Whether `varRefExpr` is `self` in an isolated object method being used other than as the
// receiver of a field access (e.g., passed around directly).
private boolean isMethodCallOnSelfInIsolatedObject(BLangSimpleVarRef varRefExpr, BLangNode parent) {
    return isSelfVarInIsolatedObject(varRefExpr) && parent != null && parent.getKind() !=
NodeKind.FIELD_BASED_ACCESS_EXPR;
}

// Whether `varRefExpr` is `self` within a (non-init) method of an isolated object.
private boolean isSelfVarInIsolatedObject(BLangSimpleVarRef varRefExpr) {
    return isInIsolatedObjectMethod(env, true) && isSelfOfObject(varRefExpr);
}

// Whether the symbol is a module-level variable declared `isolated`.
private boolean isIsolatedModuleVariableSymbol(BSymbol symbol) {
    return symbol.owner.getKind() == SymbolKind.PACKAGE && isIsolated(symbol.flags);
}

// Follows the `originalSymbol` chain of re-bound variable symbols back to the original.
private BSymbol getOriginalSymbol(BSymbol symbol) {
    if (!(symbol instanceof BVarSymbol)) {
        return symbol;
    }
    BVarSymbol varSymbol = (BVarSymbol) symbol;
    BVarSymbol originalSymbol = varSymbol.originalSymbol;
    return originalSymbol == null ? varSymbol : getOriginalSymbol(originalSymbol);
}

// Records `varRef` against its original symbol in the accessed-restricted-vars map.
private void addToAccessedRestrictedVars(Map<BSymbol, List<BLangSimpleVarRef>> accessedRestrictedVars,
                                         BLangSimpleVarRef varRef) {
    BSymbol originalSymbol = getOriginalSymbol(varRef.symbol);
    if (accessedRestrictedVars.containsKey(originalSymbol)) {
        accessedRestrictedVars.get(originalSymbol).add(varRef);
        return;
    }
    accessedRestrictedVars.put(originalSymbol, new ArrayList<>() {{
        add(varRef);
    }});
}

private boolean isEnclosedLockWithinSameFunction(BLangLock currentLock, BLangLock potentialOuterLock) {
    return isEnclosedLockWithinSameFunction(currentLock.parent, potentialOuterLock);
}

// Walks up the parse tree (stopping at a FUNCTION) to see if `potentialOuterLock` encloses
// the node within the same function.
private boolean isEnclosedLockWithinSameFunction(BLangNode parent, BLangLock potentialOuterLock) {
    if (parent == potentialOuterLock) {
        return true;
    }
    if (parent == null || parent.getKind() == NodeKind.FUNCTION) {
        return false;
    }
    return isEnclosedLockWithinSameFunction(parent.parent, potentialOuterLock);
}

// Whether `varRef` refers to a `let` variable that was initialized with an isolated
// expression (tracked via the isolatedLetVarStack while analyzing let clauses).
private boolean isReferenceOfLetVarInitializedWithAnIsolatedExpression(BLangSimpleVarRef varRef) {
    BSymbol symbol = varRef.symbol;
    if ((symbol.owner.tag & SymTag.LET) != SymTag.LET) {
        return false;
    }
    BSymbol originalSymbol = getOriginalSymbol(symbol);
    for (int i = isolatedLetVarStack.size() - 1; i >= 0; i--) {
        if (isolatedLetVarStack.get(i).contains(originalSymbol)) {
            return true;
        }
    }
    return false;
}

// Walks the owner chain of `currentOwner` (without crossing another invokable) to determine
// whether the referenced variable is defined in `enclInvokableSymbol` itself.
private boolean isReferenceToVarDefinedInSameInvokable(BSymbol currentOwner,
                                                       BInvokableSymbol enclInvokableSymbol) {
    if (currentOwner == enclInvokableSymbol) {
        return true;
    }
    if ((currentOwner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
        // Crossed into a different invokable — defined elsewhere.
        return false;
    }
    BSymbol nextOwner = currentOwner.owner;
    if (nextOwner == null) {
        return false;
    }
    return isReferenceToVarDefinedInSameInvokable(nextOwner, enclInvokableSymbol);
}

// Whether isolation may be inferred for this function: non-public module-level functions,
// and attached functions whose owner is non-public (or a service/object-constructor class).
private boolean isIsolationInferableFunction(BLangFunction funcNode) {
    Set<Flag> flagSet = funcNode.flagSet;
    if (flagSet.contains(Flag.INTERFACE)) {
        return false;
    }
    if (!flagSet.contains(Flag.ATTACHED)) {
        return !flagSet.contains(Flag.PUBLIC);
    }
    BSymbol owner = funcNode.symbol.owner;
    if (!Symbols.isFlagOn(owner.flags, Flags.PUBLIC)) {
        return true;
    }
    if (!(owner instanceof BClassSymbol)) {
        return false;
    }
    BClassSymbol ownerClassSymbol = (BClassSymbol) owner;
    return ownerClassSymbol.isServiceDecl || Symbols.isFlagOn(ownerClassSymbol.flags, Flags.OBJECT_CTOR);
}

// Marks the enclosing invokable (and, for attached functions, its owning class) as depending
// on a construct whose isolation cannot be inferred — disqualifying it from inference.
private void markDependsOnIsolationNonInferableConstructs() {
    BLangInvokableNode enclInvokable = env.enclInvokable;
    BInvokableSymbol enclInvokableSymbol;
    if (enclInvokable == null) {
        // May be inside an arrow function body, which uses a temporary symbol.
        if (isNotInArrowFunctionBody(env)) {
            return;
        }
        enclInvokableSymbol = this.arrowFunctionTempSymbolMap.get((BLangArrowFunction) env.enclEnv.node);
    } else {
        enclInvokableSymbol = enclInvokable.symbol;
        if (enclInvokable.getKind() == NodeKind.FUNCTION && ((BLangFunction) enclInvokable).attachedFunction) {
            BSymbol owner = enclInvokableSymbol.owner;
            if (this.isolationInferenceInfoMap.containsKey(owner)) {
                this.isolationInferenceInfoMap.get(owner).dependsOnlyOnInferableConstructs = false;
            }
        }
        if (!this.isolationInferenceInfoMap.containsKey(enclInvokableSymbol)) {
            return;
        }
    }
    this.isolationInferenceInfoMap.get(enclInvokableSymbol).dependsOnlyOnInferableConstructs = false;
}

// Records a dependency on an invoked function: public functions cannot be inferred, so they
// poison inference; otherwise a dependency edge is added.
private void analyzeFunctionForInference(BInvokableSymbol symbol) {
    if (Symbols.isFlagOn(symbol.flags, Flags.PUBLIC)) {
        markDependsOnIsolationNonInferableConstructs();
        return;
    }
    markDependentlyIsolatedOnFunction(symbol);
}

// Like markDependentlyIsolatedOnFunction, but for an `init` method, creating its inference
// info entry on demand.
private void markInitMethodDependentlyIsolatedOnFunction(BLangInvokableNode initMethod,
                                                         BInvokableSymbol symbol) {
    BInvokableSymbol initMethodSymbol = initMethod.symbol;
    if (!isolationInferenceInfoMap.containsKey(initMethodSymbol)) {
        isolationInferenceInfoMap.put(initMethodSymbol, new IsolationInferenceInfo());
    }
    markFunctionDependentlyIsolatedOnFunction(initMethod, symbol);
}

private void markDependentlyIsolatedOnFunction(BInvokableSymbol symbol) {
    BLangInvokableNode enclInvokable = env.enclInvokable;
    markFunctionDependentlyIsolatedOnFunction(enclInvokable, symbol);
}

// Adds `symbol` to the dependsOnFunctions set of the enclosing invokable's inference info:
// the encloser can only be inferred isolated if `symbol` turns out isolated.
private void markFunctionDependentlyIsolatedOnFunction(BLangInvokableNode enclInvokable,
                                                       BInvokableSymbol symbol) {
    BInvokableSymbol enclInvokableSymbol;
    if (enclInvokable == null) {
        if (isNotInArrowFunctionBody(env)) {
            return;
        }
        enclInvokableSymbol = this.arrowFunctionTempSymbolMap.get((BLangArrowFunction) env.enclEnv.node);
    } else {
        enclInvokableSymbol = enclInvokable.symbol;
        if (!isolationInferenceInfoMap.containsKey(enclInvokableSymbol)) {
            return;
        }
    }
    isolationInferenceInfoMap.get(enclInvokableSymbol).dependsOnFunctions.add(symbol);
}

// True unless the env is the expression body of an arrow function.
private boolean isNotInArrowFunctionBody(SymbolEnv env) {
    return env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY ||
            env.enclEnv.node.getKind() != NodeKind.ARROW_EXPR;
}

// Variable-dependency analogue of markInitMethodDependentlyIsolatedOnFunction.
private void markInitMethodDependentlyIsolatedOnVar(BLangInvokableNode initMethod, BSymbol symbol) {
    BInvokableSymbol initMethodSymbol = initMethod.symbol;
    if (!isolationInferenceInfoMap.containsKey(initMethodSymbol)) {
        isolationInferenceInfoMap.put(initMethodSymbol, new IsolationInferenceInfo());
    }
    markFunctionDependentlyIsolatedOnVar(initMethod, symbol);
}

private void markDependentlyIsolatedOnVar(BSymbol symbol) {
    BLangInvokableNode enclInvokable = env.enclInvokable;
    markFunctionDependentlyIsolatedOnVar(enclInvokable, symbol);
}

// Adds `symbol` (a variable or class) to the dependsOnVariablesAndClasses set of the
// enclosing invokable's inference info.
private void markFunctionDependentlyIsolatedOnVar(BLangInvokableNode enclInvokable, BSymbol symbol) {
    BInvokableSymbol enclInvokableSymbol;
    if (enclInvokable == null) {
        if (isNotInArrowFunctionBody(env)) {
            return;
        }
        enclInvokableSymbol = this.arrowFunctionTempSymbolMap.get((BLangArrowFunction) env.enclEnv.node);
    } else {
        enclInvokableSymbol = enclInvokable.symbol;
        if (!isolationInferenceInfoMap.containsKey(enclInvokableSymbol)) {
            return;
        }
    }
    isolationInferenceInfoMap.get(enclInvokableSymbol).dependsOnVariablesAndClasses.add(symbol);
}

// Collects the symbols of the given module-level variables.
private Set<BSymbol> getModuleLevelVarSymbols(List<BLangVariable> moduleLevelVars) {
    Set<BSymbol> symbols = new HashSet<>(moduleLevelVars.size());
    for (BLangVariable globalVar : moduleLevelVars) {
        symbols.add(globalVar.symbol);
    }
    return symbols;
}

// Seeds inference info for module-level variables that are candidates for isolation
// inference (non-public, not already isolated, mutable or of a possibly-mutable type).
private void populateNonPublicMutableOrNonIsolatedVars(Set<BSymbol> moduleLevelVarSymbols) {
    for (BSymbol moduleLevelVarSymbol : moduleLevelVarSymbols) {
        if (!isVarRequiringInference(moduleLevelVarSymbol)) {
            continue;
        }
        this.isolationInferenceInfoMap.put(moduleLevelVarSymbol, new VariableIsolationInferenceInfo());
    }
}

// Seeds inference info for every class that qualifies for isolation inference.
private void populateNonPublicIsolatedInferableClasses(List<BLangClassDefinition> classDefinitions) {
    for (BLangClassDefinition classDefinition : classDefinitions) {
        populateInferableClass(classDefinition);
    }
}

// Whether analysis is currently inside an object's `init` method.
private boolean inObjectInitMethod() {
    BLangInvokableNode enclInvokable = env.enclInvokable;
    if (enclInvokable == null || enclInvokable.getKind() != NodeKind.FUNCTION) {
        return false;
    }
    return ((BLangFunction) enclInvokable).objInitFunction;
}

// A variable requires inference if it is non-public, not declared isolated, and either
// mutable or of a type that is not provably immutable.
private boolean isVarRequiringInference(BSymbol moduleLevelVarSymbol) {
    long symbolFlags = moduleLevelVarSymbol.flags;
    if (Symbols.isFlagOn(symbolFlags, Flags.PUBLIC) || Symbols.isFlagOn(symbolFlags, Flags.ISOLATED)) {
        return false;
    }
    if (!Symbols.isFlagOn(symbolFlags, Flags.FINAL)) {
        return true;
    }
    BType type = moduleLevelVarSymbol.type;
    return !types.isInherentlyImmutableType(type) && !Symbols.isFlagOn(type.flags, Flags.READONLY);
}

// Seeds ClassIsolationInferenceInfo for a class whose isolation can potentially be
// inferred, recording which fields need lock protection and which object types it depends on.
private void populateInferableClass(BLangClassDefinition classDefinition) {
    // Public classes are not inferable, except service declarations and object constructors.
    if (Symbols.isFlagOn(classDefinition.symbol.flags, Flags.PUBLIC) &&
!classDefinition.isServiceDecl && !classDefinition.flagSet.contains(Flag.OBJECT_CTOR)) {
        return;
    }
    BType type = classDefinition.getBType();
    if (Symbols.isFlagOn(type.flags, Flags.ISOLATED)) {
        // Already isolated — nothing to infer.
        return;
    }
    Set<BLangIdentifier> protectedFields = new HashSet<>();
    Set<BSymbol> dependentObjectTypes = new HashSet<>();
    Map<String, BLangSimpleVariable> fields = new HashMap<>();
    for (BLangSimpleVariable field : classDefinition.fields) {
        fields.put(field.name.value, field);
    }
    // Include fields from included types, without overriding directly declared ones.
    for (BLangSimpleVariable referencedField : classDefinition.referencedFields) {
        String name = referencedField.name.value;
        if (fields.containsKey(name)) {
            continue;
        }
        fields.put(name, referencedField);
    }
    for (BLangSimpleVariable field : fields.values()) {
        boolean isFinal = field.flagSet.contains(Flag.FINAL);
        boolean isPrivate = field.flagSet.contains(Flag.PRIVATE);
        if (!isFinal && !isPrivate) {
            // Mutable, non-private field — isolation can never be inferred for this class.
            return;
        }
        BType fieldType = field.getBType();
        if (isFinal && types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(fieldType)) {
            // Safe as-is; needs no protection.
            continue;
        }
        boolean subtypeOfReadOnlyOrIsolatedObjectOrInferableObject =
                isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(classDefinition.symbol.owner, fieldType,
                                                                     dependentObjectTypes);
        if (!isPrivate && !subtypeOfReadOnlyOrIsolatedObjectOrInferableObject) {
            return;
        }
        protectedFields.add(field.name);
    }
    ClassIsolationInferenceInfo inferenceInfo = new ClassIsolationInferenceInfo(protectedFields);
    this.isolationInferenceInfoMap.put(classDefinition.symbol, inferenceInfo);
    // The class can only be inferred isolated if these object types also turn out isolated.
    inferenceInfo.dependsOnVariablesAndClasses.addAll(dependentObjectTypes);
}

private boolean isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(BSymbol owner, BType type) {
    return isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(owner, type, new HashSet<>());
}

// Whether `type` is a subtype of readonly/isolated object, or a non-public object type of
// the same owner whose isolation may still be inferred. Inferable object types encountered
// are collected into `inferableClasses`.
private boolean isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(BSymbol owner, BType type,
                                                                     Set<BSymbol> inferableClasses) {
    if (types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(type)) {
        return true;
    }
    int tag = type.tag;
    if (tag == TypeTags.OBJECT) {
        BTypeSymbol tsymbol = type.tsymbol;
        boolean inferable = tsymbol.owner == owner && !Symbols.isFlagOn(tsymbol.flags, Flags.PUBLIC);
        if (inferable) {
            inferableClasses.add(tsymbol);
        }
        return inferable;
    }
    if (tag != TypeTags.UNION) {
        return false;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSubtypeOfReadOnlyOrIsolatedObjectOrInferableObject(owner, memberType, inferableClasses)) {
            return false;
        }
    }
    return true;
}

// Collects all object types reachable from the package's public API surface (public module
// vars/constants, public type and class definitions, and public/attached functions). Such
// types must not be inferred isolated, since external code may depend on them.
private Set<BType> getPubliclyExposedObjectTypes(BLangPackage bLangPackage) {
    Set<BType> publiclyExposedTypes = new HashSet<>();
    BPubliclyExposedInferableTypeCollector collector =
            new BPubliclyExposedInferableTypeCollector(publiclyExposedTypes);
    List<BLangVariable> moduleVarsAndConstants = new ArrayList<>() {{
        addAll(bLangPackage.globalVars);
        addAll(bLangPackage.constants);
    }};
    for (BLangVariable construct : moduleVarsAndConstants) {
        if (!construct.flagSet.contains(Flag.PUBLIC)) {
            continue;
        }
        BLangType typeNode = construct.typeNode;
        if (typeNode == null) {
            continue;
        }
        collector.visitType(typeNode.getBType());
    }
    for (BLangTypeDefinition typeDefinition : bLangPackage.typeDefinitions) {
        Set<Flag> flagSet = typeDefinition.flagSet;
        if (!flagSet.contains(Flag.PUBLIC) || flagSet.contains(Flag.ANONYMOUS)) {
            continue;
        }
        collector.visitType(typeDefinition.typeNode.getBType());
    }
    for (BLangClassDefinition classDefinition : bLangPackage.classDefinitions) {
        Set<Flag> flagSet = classDefinition.flagSet;
        if (!flagSet.contains(Flag.PUBLIC) || classDefinition.isServiceDecl ||
                flagSet.contains(Flag.OBJECT_CTOR)) {
            continue;
        }
        collector.visitType(classDefinition.getBType());
    }
    for (BLangFunction function : bLangPackage.functions) {
        // Non-public functions are skipped unless they are attached to a public receiver.
        if (!function.flagSet.contains(Flag.PUBLIC) &&
                (!function.attachedFunction || !function.receiver.flagSet.contains(Flag.PUBLIC))) {
            continue;
        }
        collector.visitType(function.getBType());
    }
    return publiclyExposedTypes;
}

// Runs the isolation inference pass: for each collected inference entry, decides whether the
// function, variable, or class can be marked `isolated`, and sets the flags accordingly.
private void inferIsolation(Set<BSymbol> moduleLevelVarSymbols, Set<BType> publiclyExposedObjectTypes,
                            List<BLangClassDefinition> classDefinitions) {
    for (Map.Entry<BSymbol, IsolationInferenceInfo> entry : this.isolationInferenceInfoMap.entrySet()) {
        IsolationInferenceInfo value = entry.getValue();
        BSymbol symbol = entry.getKey();
        if (value.getKind() == IsolationInferenceKind.FUNCTION) {
            if (inferFunctionIsolation(symbol, value, publiclyExposedObjectTypes, classDefinitions,
                                       new HashSet<>())) {
                symbol.flags |= Flags.ISOLATED;
                if (!moduleLevelVarSymbols.contains(symbol)) {
                    symbol.type.flags |= Flags.ISOLATED;
                }
            }
            continue;
        }
        boolean isObjectType = symbol.kind == SymbolKind.OBJECT;
        if (!isObjectType && isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(publiclyExposedObjectTypes,
                classDefinitions, symbol, new HashSet<>())) {
            // Already safe by type — no need to mark it.
            continue;
        }
        if (inferVariableOrClassIsolation(publiclyExposedObjectTypes, classDefinitions, symbol,
                (VariableIsolationInferenceInfo) value, isObjectType, new HashSet<>())) {
            symbol.flags |= Flags.ISOLATED;
            if (isObjectType) {
                symbol.type.flags |= Flags.ISOLATED;
            }
        }
    }
    this.isolationInferenceInfoMap.clear();
    this.arrowFunctionTempSymbolMap.clear();
}

/**
 * Decides whether a module-level variable or a class can be inferred `isolated`, based on
 * how it was accessed (inside/outside locks), its fields/initializers (for classes), and the
 * isolation of everything it depends on.
 *
 * @param unresolvedSymbols symbols currently being resolved; a cycle resolves optimistically
 *                          to true (the other members of the cycle decide the outcome)
 */
private boolean inferVariableOrClassIsolation(Set<BType> publiclyExposedObjectTypes,
                                              List<BLangClassDefinition> classDefinitions, BSymbol symbol,
                                              VariableIsolationInferenceInfo inferenceInfo, boolean isObjectType,
                                              Set<BSymbol> unresolvedSymbols) {
    if (!unresolvedSymbols.add(symbol)) {
        // Already being resolved — optimistic result to break the cycle.
        return true;
    }
    if (!inferenceInfo.dependsOnlyOnInferableConstructs) {
        return false;
    }
    if (inferenceInfo.accessedOutsideLockStatement) {
        if (!inferenceInfo.accessOutsideLockStatementValidIfInferredIsolated) {
            if (inferenceInfo.getKind() == IsolationInferenceKind.CLASS) {
                return false;
            }
            // A final variable accessed outside a lock is OK only if its type is safe.
            if (Symbols.isFlagOn(symbol.flags, Flags.FINAL)) {
                return isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes,
                        classDefinitions, true, symbol.type, unresolvedSymbols);
            }
            return false;
        }
        for (BType bType : inferenceInfo.typesOfFinalFieldsAccessedOutsideLock) {
            if (!isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes,
                    classDefinitions, true, bType, unresolvedSymbols)) {
                return false;
            }
        }
    }
    if (isObjectType) {
        // A publicly exposed object type must not be inferred isolated.
        if (publiclyExposedObjectTypes.contains(symbol.type)) {
            return false;
        }
        BLangClassDefinition classDefinition = null;
        for (BLangClassDefinition classDef : classDefinitions) {
            if (classDef.symbol == symbol) {
                classDefinition = classDef;
                break;
            }
        }
        if (classDefinition != null) {
            List<BLangSimpleVariable> classFields = classDefinition.fields;
            Map<BLangIdentifier, BLangSimpleVariable> fields = new HashMap<>(classFields.size());
            for (BLangSimpleVariable classField : classFields) {
                fields.put(classField.name, classField);
            }
            // Non-private protected fields must be final with a safe type.
            for (BLangIdentifier protectedField :
                    ((ClassIsolationInferenceInfo) inferenceInfo).protectedFields) {
                BLangSimpleVariable field = fields.get(protectedField);
                if (field.flagSet.contains(Flag.PRIVATE)) {
                    continue;
                }
                if (!field.flagSet.contains(Flag.FINAL)) {
                    return false;
                }
                BType fieldType = field.typeNode.getBType();
                if (!isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes,
                        classDefinitions, true, fieldType, unresolvedSymbols)) {
                    return false;
                }
            }
            // Field default values must be isolated expressions.
            for (BLangSimpleVariable field : classDefinition.fields) {
                BLangExpression expr = field.expr;
                if (expr != null && !isIsolatedExpression(expr, false, false, new ArrayList<>(), true,
                        publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) {
                    return false;
                }
            }
            // Values assigned to `self.field` in `init` must be isolated expressions.
            BLangFunction initFunction = classDefinition.initFunction;
            if (initFunction != null) {
                BLangFunctionBody body = initFunction.body;
                for (BLangStatement stmt : ((BLangBlockFunctionBody) body).stmts) {
                    if (stmt.getKind() != NodeKind.ASSIGNMENT) {
                        continue;
                    }
                    BLangAssignment assignmentStmt = (BLangAssignment) stmt;
                    BLangExpression lhs = assignmentStmt.varRef;
                    if (lhs.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR) {
                        continue;
                    }
                    BLangFieldBasedAccess fieldAccessExpr = (BLangFieldBasedAccess) lhs;
                    BLangExpression calledOnExpr = fieldAccessExpr.expr;
                    if (calledOnExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
                        continue;
                    }
                    if (!isSelfOfObject((BLangSimpleVarRef) calledOnExpr)) {
                        continue;
                    }
                    if (!isIsolatedExpression(assignmentStmt.expr, false, false, new ArrayList<>(), true,
                            publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) {
                        return false;
                    }
                }
            }
        }
    } else if (isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(publiclyExposedObjectTypes,
            classDefinitions, symbol, unresolvedSymbols)) {
        return true;
    } else if (Symbols.isFlagOn(symbol.flags, Flags.LISTENER)) {
        return false;
    }
    // Every lock in which this construct was accessed must itself be isolation-safe.
    for (LockInfo lockInfo : inferenceInfo.accessedLockInfo) {
        if (!lockInfo.accessedRestrictedVars.isEmpty()) {
            return false;
        }
        for (BSymbol accessedPotentiallyIsolatedVar : lockInfo.accessedPotentiallyIsolatedVars) {
            if (accessedPotentiallyIsolatedVar == symbol) {
                continue;
            }
            if (!isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(publiclyExposedObjectTypes,
                    classDefinitions, accessedPotentiallyIsolatedVar, unresolvedSymbols)) {
                return false;
            }
        }
        for (BLangExpression expr : lockInfo.nonIsolatedTransferInExpressions) {
            // A reference to the symbol itself inside its own lock is fine.
            if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    ((BLangSimpleVarRef) expr).symbol == symbol) {
                continue;
            }
            if (isIsolatedExpression(expr, false, false, new ArrayList<>(), true, publiclyExposedObjectTypes,
                    classDefinitions, unresolvedSymbols)) {
                continue;
            }
            return false;
        }
        for (BLangExpression expr : lockInfo.nonIsolatedTransferOutExpressions) {
            if (isIsolatedExpression(expr, false, false, new ArrayList<>(), true, publiclyExposedObjectTypes,
                    classDefinitions, unresolvedSymbols)) {
                continue;
            }
            return false;
        }
        for (BLangInvocation nonIsolatedInvocation : lockInfo.nonIsolatedInvocations) {
            BSymbol funcSymbol = nonIsolatedInvocation.symbol;
            if (!this.isolationInferenceInfoMap.containsKey(funcSymbol)) {
                return false;
            }
            if (inferFunctionIsolation(funcSymbol, this.isolationInferenceInfoMap.get(funcSymbol),
                    publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) {
                continue;
            }
            return false;
        }
    }
    return true;
} private boolean isFinalVarOfReadOnlyOrIsolatedObjectTypeWithInference(Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, BSymbol symbol, Set<BSymbol> unresolvedSymbols) { return Symbols.isFlagOn(symbol.flags, Flags.FINAL) && isSubTypeOfReadOnlyOrIsolatedObjectUnionWithInference(publiclyExposedObjectTypes, classDefinitions, true, symbol.type, unresolvedSymbols); } private boolean inferFunctionIsolation(BSymbol symbol, IsolationInferenceInfo functionIsolationInferenceInfo, Set<BType> publiclyExposedObjectTypes, List<BLangClassDefinition> classDefinitions, Set<BSymbol> unresolvedSymbols) { if (!unresolvedSymbols.add(symbol)) { return true; } if (!functionIsolationInferenceInfo.dependsOnlyOnInferableConstructs) { return false; } if (symbol.kind == SymbolKind.FUNCTION) { BVarSymbol receiverSymbol = ((BInvokableSymbol) symbol).receiverSymbol; if (receiverSymbol != null && receiverSymbol.type.tag == TypeTags.OBJECT && publiclyExposedObjectTypes.contains(receiverSymbol.type)) { return false; } } if (functionIsolationInferenceInfo.inferredIsolated) { return true; } for (BInvokableSymbol bInvokableSymbol : functionIsolationInferenceInfo.dependsOnFunctions) { if (!this.isolationInferenceInfoMap.containsKey(bInvokableSymbol)) { return false; } if (!inferFunctionIsolation(bInvokableSymbol, this.isolationInferenceInfoMap.get(bInvokableSymbol), publiclyExposedObjectTypes, classDefinitions, unresolvedSymbols)) { return false; } } for (BSymbol dependsOnVariable : functionIsolationInferenceInfo.dependsOnVariablesAndClasses) { if (Symbols.isFlagOn(dependsOnVariable.flags, Flags.ISOLATED)) { continue; } if (!this.isolationInferenceInfoMap.containsKey(dependsOnVariable)) { return false; } if (!inferVariableOrClassIsolation(publiclyExposedObjectTypes, classDefinitions, dependsOnVariable, (VariableIsolationInferenceInfo) this.isolationInferenceInfoMap.get( dependsOnVariable), false, unresolvedSymbols)) { return false; } } if (unresolvedSymbols.size() == 
1) { functionIsolationInferenceInfo.inferredIsolated = true; } return true; } private void logServiceIsolationWarnings(List<BLangClassDefinition> classDefinitions) { for (BLangClassDefinition classDefinition : classDefinitions) { if (classDefinition.flagSet.contains(Flag.SERVICE)) { logServiceIsolationWarnings(classDefinition); } } } private void logServiceIsolationWarnings(BLangClassDefinition classDefinition) { boolean isolatedService = isIsolated(classDefinition.getBType().flags); for (BLangFunction function : classDefinition.functions) { Set<Flag> flagSet = function.flagSet; if (!flagSet.contains(Flag.RESOURCE) && !flagSet.contains(Flag.REMOTE)) { continue; } boolean isolatedMethod = isIsolated(function.getBType().flags); if (isolatedService && isolatedMethod) { continue; } dlog.warning(getStartLocation(function.pos), getWarningCode(isolatedService, isolatedMethod)); } } private Location getStartLocation(Location location) { LineRange lineRange = location.lineRange(); LinePosition linePosition = lineRange.startLine(); int startLine = linePosition.line(); int startColumn = linePosition.offset(); return new BLangDiagnosticLocation(lineRange.filePath(), startLine, startLine, startColumn, startColumn); } private DiagnosticWarningCode getWarningCode(boolean isolatedService, boolean isolatedMethod) { if (!isolatedService && !isolatedMethod) { return DiagnosticWarningCode .CONCURRENT_CALLS_WILL_NOT_BE_MADE_TO_NON_ISOLATED_METHOD_IN_NON_ISOLATED_SERVICE; } if (isolatedService) { return DiagnosticWarningCode.CONCURRENT_CALLS_WILL_NOT_BE_MADE_TO_NON_ISOLATED_METHOD; } return DiagnosticWarningCode.CONCURRENT_CALLS_WILL_NOT_BE_MADE_TO_NON_ISOLATED_SERVICE; } private BInvokableSymbol createTempSymbolIfNonExistent(BLangArrowFunction bLangArrowFunction) { if (arrowFunctionTempSymbolMap.containsKey(bLangArrowFunction)) { return arrowFunctionTempSymbolMap.get(bLangArrowFunction); } TemporaryArrowFunctionSymbol symbol = new TemporaryArrowFunctionSymbol(bLangArrowFunction); 
this.arrowFunctionTempSymbolMap.put(bLangArrowFunction, symbol); this.isolationInferenceInfoMap.put(symbol, new IsolationInferenceInfo()); return symbol; } /** * For lock statements with restricted var usage, invalid transfers and non-isolated invocations should result in * compilation errors. This class holds potentially erroneous expression per lock statement, and the protected * variables accessed in the lock statement, and information required for isolated inference. */ private static class LockInfo { BLangLock lockNode; Map<BSymbol, List<BLangSimpleVarRef>> accessedRestrictedVars = new HashMap<>(); List<BLangSimpleVarRef> nonCaptureBindingPatternVarRefsOnLhs = new ArrayList<>(); List<BLangExpression> nonIsolatedTransferInExpressions = new ArrayList<>(); List<BLangExpression> nonIsolatedTransferOutExpressions = new ArrayList<>(); List<BLangInvocation> nonIsolatedInvocations = new ArrayList<>(); Set<BSymbol> accessedPotentiallyIsolatedVars = new HashSet<>(); private LockInfo(BLangLock lockNode) { this.lockNode = lockNode; } } private static class IsolationInferenceInfo { boolean dependsOnlyOnInferableConstructs = true; Set<BInvokableSymbol> dependsOnFunctions = new HashSet<>(); Set<BSymbol> dependsOnVariablesAndClasses = new HashSet<>(); boolean inferredIsolated = false; IsolationInferenceKind getKind() { return IsolationInferenceKind.FUNCTION; } } private static class VariableIsolationInferenceInfo extends IsolationInferenceInfo { Set<LockInfo> accessedLockInfo = new HashSet<>(); boolean accessedOutsideLockStatement = false; boolean accessOutsideLockStatementValidIfInferredIsolated = true; Set<BType> typesOfFinalFieldsAccessedOutsideLock = new HashSet<>(); @Override IsolationInferenceKind getKind() { return IsolationInferenceKind.VARIABLE; } } private static class ClassIsolationInferenceInfo extends VariableIsolationInferenceInfo { Set<BLangIdentifier> protectedFields; ClassIsolationInferenceInfo(Set<BLangIdentifier> protectedFields) { this.protectedFields = 
protectedFields; } @Override IsolationInferenceKind getKind() { return IsolationInferenceKind.CLASS; } } private enum IsolationInferenceKind { CLASS, VARIABLE, FUNCTION } private class TemporaryArrowFunctionSymbol extends BInvokableSymbol { TemporaryArrowFunctionSymbol(BLangArrowFunction fn) { super(SymTag.FUNCTION, 0, Names.EMPTY, env.enclPkg.symbol.pkgID, fn.funcType, env.enclEnv.enclVarSym, null, VIRTUAL); this.kind = SymbolKind.FUNCTION; } } private static class BPubliclyExposedInferableTypeCollector implements TypeVisitor { Set<BType> unresolvedTypes; Set<BType> exposedTypes; public BPubliclyExposedInferableTypeCollector(Set<BType> exposedTypes) { this.unresolvedTypes = new HashSet<>(); this.exposedTypes = exposedTypes; } public void visitType(BType type) { if (type == null) { return; } if (!unresolvedTypes.add(type)) { return; } type.accept(this); } @Override public void visit(BAnnotationType bAnnotationType) { } @Override public void visit(BArrayType bArrayType) { visitType(bArrayType.eType); } @Override public void visit(BBuiltInRefType bBuiltInRefType) { } @Override public void visit(BAnyType bAnyType) { } @Override public void visit(BAnydataType bAnydataType) { } @Override public void visit(BErrorType bErrorType) { visitType(bErrorType.detailType); } @Override public void visit(BFiniteType bFiniteType) { } @Override public void visit(BInvokableType bInvokableType) { if (Symbols.isFlagOn(bInvokableType.flags, Flags.ANY_FUNCTION)) { return; } for (BType paramType : bInvokableType.paramTypes) { visitType(paramType); } visitType(bInvokableType.restType); visitType(bInvokableType.retType); } @Override public void visit(BJSONType bjsonType) { } @Override public void visit(BMapType bMapType) { visitType(bMapType.constraint); } @Override public void visit(BStreamType bStreamType) { visitType(bStreamType.constraint); visitType(bStreamType.completionType); } @Override public void visit(BTypedescType bTypedescType) { visitType(bTypedescType.constraint); } @Override 
public void visit(BParameterizedType bTypedescType) { } @Override public void visit(BNeverType bNeverType) { } @Override public void visit(BNilType bNilType) { } @Override public void visit(BNoType bNoType) { } @Override public void visit(BPackageType bPackageType) { } @Override public void visit(BStructureType bStructureType) { } @Override public void visit(BTupleType bTupleType) { for (BType memType : bTupleType.tupleTypes) { visitType(memType); } visitType(bTupleType.restType); } @Override public void visit(BUnionType bUnionType) { for (BType memType : bUnionType.getMemberTypes()) { visitType(memType); } } @Override public void visit(BIntersectionType bIntersectionType) { for (BType constituentType : bIntersectionType.getConstituentTypes()) { visitType(constituentType); } visitType(bIntersectionType.effectiveType); } @Override public void visit(BXMLType bXmlType) { visitType(bXmlType.constraint); } @Override public void visit(BTableType bTableType) { visitType(bTableType.constraint); visitType(bTableType.keyTypeConstraint); } @Override public void visit(BRecordType bRecordType) { for (BField field : bRecordType.fields.values()) { visitType(field.type); } if (!bRecordType.sealed) { visitType(bRecordType.restFieldType); } } @Override public void visit(BObjectType bObjectType) { this.exposedTypes.add(bObjectType); for (BField field : bObjectType.fields.values()) { visitType(field.type); } for (BAttachedFunction attachedFunc : ((BObjectTypeSymbol) bObjectType.tsymbol).attachedFuncs) { visitType(attachedFunc.type); } } @Override public void visit(BType bType) { } @Override public void visit(BFutureType bFutureType) { visitType(bFutureType.constraint); } @Override public void visit(BHandleType bHandleType) { } } }
Hi, there is problem in here. When we are loading image, we shouldn't write the editLog, or else some error will report (Because bdbje is open only after the image has been loaded).
public static DeleteHandler read(DataInput in) throws IOException { String json = Text.readString(in); DeleteHandler deleteHandler = GsonUtils.GSON.fromJson(json, DeleteHandler.class); deleteHandler.removeOldDeleteInfos(); return deleteHandler; }
return deleteHandler;
public static DeleteHandler read(DataInput in) throws IOException { String json = Text.readString(in); DeleteHandler deleteHandler = GsonUtils.GSON.fromJson(json, DeleteHandler.class); deleteHandler.removeOldDeleteInfos(new Timestamp()); return deleteHandler; }
class DeleteHandler extends MasterDaemon implements Writable { private static final Logger LOG = LogManager.getLogger(DeleteHandler.class); private Map<Long, DeleteJob> idToDeleteJob; @SerializedName(value = "dbToDeleteInfos") private Map<Long, List<DeleteInfo>> dbToDeleteInfos; private ReentrantReadWriteLock lock; public DeleteHandler() { super("DeleteHandler", Config.delete_info_clean_interval_second); idToDeleteJob = Maps.newConcurrentMap(); dbToDeleteInfos = Maps.newConcurrentMap(); } private enum CancelType { METADATA_MISSING, TIMEOUT, COMMIT_FAIL, UNKNOWN } public void readLock() { lock.readLock().lock(); } public void readUnlock() { lock.readLock().unlock(); } private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public void process(DeleteStmt stmt) throws DdlException, QueryStateException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); List<String> partitionNames = stmt.getPartitionNames(); boolean noPartitionSpecified = partitionNames.isEmpty(); List<Predicate> conditions = stmt.getDeleteConditions(); Database db = Catalog.getCurrentCatalog().getDb(dbName); if (db == null) { throw new DdlException("Db does not exist. name: " + dbName); } DeleteJob deleteJob = null; try { MarkedCountDownLatch<Long, Long> countDownLatch; long transactionId = -1; Table table = null; try { table = db.getTableOrThrowException(tableName, Table.TableType.OLAP); } catch (MetaNotFoundException e) { throw new DdlException(e.getMessage()); } table.readLock(); try { OlapTable olapTable = (OlapTable) table; if (olapTable.getState() != OlapTable.OlapTableState.NORMAL) { } if (noPartitionSpecified) { if (olapTable.getPartitionInfo().getType() == PartitionType.RANGE || olapTable.getPartitionInfo().getType() == PartitionType.LIST) { if (!ConnectContext.get().getSessionVariable().isDeleteWithoutPartition()) { throw new DdlException("This is a range or list partitioned table." 
+ " You should specify partition in delete stmt, or set delete_without_partition to true"); } else { partitionNames.addAll(olapTable.getPartitionNames()); } } else if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) { partitionNames.add(olapTable.getName()); } else { throw new DdlException("Unknown partition type: " + olapTable.getPartitionInfo().getType()); } } Map<Long, Short> partitionReplicaNum = Maps.newHashMap(); List<Partition> partitions = Lists.newArrayList(); for (String partName : partitionNames) { Partition partition = olapTable.getPartition(partName); if (partition == null) { throw new DdlException("Partition does not exist. name: " + partName); } partitions.add(partition); partitionReplicaNum.put(partition.getId(), ((OlapTable) table).getPartitionInfo().getReplicationNum(partition.getId())); } List<String> deleteConditions = Lists.newArrayList(); checkDeleteV2(olapTable, partitions, conditions, deleteConditions); String label = "delete_" + UUID.randomUUID(); long jobId = Catalog.getCurrentCatalog().getNextId(); transactionId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(), Lists.newArrayList(table.getId()), label, null, new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), TransactionState.LoadJobSourceType.FRONTEND, jobId, Config.stream_load_default_timeout_second); DeleteInfo deleteInfo = new DeleteInfo(db.getId(), olapTable.getId(), tableName, deleteConditions); deleteInfo.setPartitions(noPartitionSpecified, partitions.stream().map(p -> p.getId()).collect(Collectors.toList()), partitionNames); deleteJob = new DeleteJob(jobId, transactionId, label, partitionReplicaNum, deleteInfo); idToDeleteJob.put(deleteJob.getTransactionId(), deleteJob); Catalog.getCurrentGlobalTransactionMgr().getCallbackFactory().addCallback(deleteJob); AgentBatchTask batchTask = new AgentBatchTask(); int totalReplicaNum = 0; for (Partition partition : partitions) { for (MaterializedIndex index : 
partition.getMaterializedIndices(IndexExtState.ALL)) { for (Tablet tablet : index.getTablets()) { totalReplicaNum += tablet.getReplicas().size(); } } } countDownLatch = new MarkedCountDownLatch<Long, Long>(totalReplicaNum); for (Partition partition : partitions) { for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) { long indexId = index.getId(); int schemaHash = olapTable.getSchemaHashByIndexId(indexId); for (Tablet tablet : index.getTablets()) { long tabletId = tablet.getId(); TPushType type = TPushType.DELETE; for (Replica replica : tablet.getReplicas()) { long replicaId = replica.getId(); long backendId = replica.getBackendId(); countDownLatch.addMark(backendId, tabletId); PushTask pushTask = new PushTask(null, replica.getBackendId(), db.getId(), olapTable.getId(), partition.getId(), indexId, tabletId, replicaId, schemaHash, -1, 0, "", -1, 0, -1, type, conditions, true, TPriority.NORMAL, TTaskType.REALTIME_PUSH, transactionId, Catalog.getCurrentGlobalTransactionMgr().getTransactionIDGenerator().getNextTransactionId()); pushTask.setIsSchemaChanging(false); pushTask.setCountDownLatch(countDownLatch); if (AgentTaskQueue.addTask(pushTask)) { batchTask.addTask(pushTask); deleteJob.addPushTask(pushTask); deleteJob.addTablet(tabletId); } } } } } if (batchTask.getTaskNum() > 0) { AgentTaskExecutor.submit(batchTask); } } catch (Throwable t) { LOG.warn("error occurred during delete process", t); if (Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), transactionId) != null) { cancelJob(deleteJob, CancelType.UNKNOWN, t.getMessage()); } throw new DdlException(t.getMessage(), t); } finally { table.readUnlock(); } long timeoutMs = deleteJob.getTimeoutMs(); LOG.info("waiting delete Job finish, signature: {}, timeout: {}", transactionId, timeoutMs); boolean ok = false; try { ok = countDownLatch.await(timeoutMs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { LOG.warn("InterruptedException: ", e); ok = false; } if 
(!ok) { String errMsg = ""; List<Entry<Long, Long>> unfinishedMarks = countDownLatch.getLeftMarks(); List<Entry<Long, Long>> subList = unfinishedMarks.subList(0, Math.min(unfinishedMarks.size(), 5)); if (!subList.isEmpty()) { errMsg = "unfinished replicas [BackendId=TabletId]: " + Joiner.on(", ").join(subList); } LOG.warn(errMsg); try { deleteJob.checkAndUpdateQuorum(); } catch (MetaNotFoundException e) { cancelJob(deleteJob, CancelType.METADATA_MISSING, e.getMessage()); throw new DdlException(e.getMessage(), e); } DeleteState state = deleteJob.getState(); switch (state) { case UN_QUORUM: LOG.warn("delete job timeout: transactionId {}, timeout {}, {}", transactionId, timeoutMs, errMsg); cancelJob(deleteJob, CancelType.TIMEOUT, "delete job timeout"); throw new DdlException("failed to execute delete. transaction id " + transactionId + ", timeout(ms) " + timeoutMs + ", " + errMsg); case QUORUM_FINISHED: case FINISHED: try { long nowQuorumTimeMs = System.currentTimeMillis(); long endQuorumTimeoutMs = nowQuorumTimeMs + timeoutMs / 2; while (deleteJob.getState() == DeleteState.QUORUM_FINISHED && endQuorumTimeoutMs > nowQuorumTimeMs) { deleteJob.checkAndUpdateQuorum(); Thread.sleep(1000); nowQuorumTimeMs = System.currentTimeMillis(); LOG.debug("wait for quorum finished delete job: {}, txn id: {}" + deleteJob.getId(), transactionId); } } catch (MetaNotFoundException e) { cancelJob(deleteJob, CancelType.METADATA_MISSING, e.getMessage()); throw new DdlException(e.getMessage(), e); } catch (InterruptedException e) { cancelJob(deleteJob, CancelType.UNKNOWN, e.getMessage()); throw new DdlException(e.getMessage(), e); } commitJob(deleteJob, db, table, timeoutMs); break; default: Preconditions.checkState(false, "wrong delete job state: " + state.name()); break; } } else { commitJob(deleteJob, db, table, timeoutMs); } } finally { if (!FeConstants.runningUnitTest) { clearJob(deleteJob); } } } private void commitJob(DeleteJob job, Database db, Table table, long timeoutMs) throws 
DdlException, QueryStateException { TransactionStatus status = null; try { unprotectedCommitJob(job, db, table, timeoutMs); status = Catalog.getCurrentGlobalTransactionMgr(). getTransactionState(db.getId(), job.getTransactionId()).getTransactionStatus(); } catch (UserException e) { if (cancelJob(job, CancelType.COMMIT_FAIL, e.getMessage())) { throw new DdlException(e.getMessage(), e); } } StringBuilder sb = new StringBuilder(); sb.append("{'label':'").append(job.getLabel()).append("', 'status':'").append(status.name()); sb.append("', 'txnId':'").append(job.getTransactionId()).append("'"); switch (status) { case COMMITTED: { String errMsg = "delete job is committed but may be taking effect later"; sb.append(", 'err':'").append(errMsg).append("'"); sb.append("}"); throw new QueryStateException(MysqlStateType.OK, sb.toString()); } case VISIBLE: { sb.append("}"); throw new QueryStateException(MysqlStateType.OK, sb.toString()); } default: Preconditions.checkState(false, "wrong transaction status: " + status.name()); break; } }
class DeleteHandler implements Writable { private static final Logger LOG = LogManager.getLogger(DeleteHandler.class); private Map<Long, DeleteJob> idToDeleteJob; @SerializedName(value = "dbToDeleteInfos") private Map<Long, List<DeleteInfo>> dbToDeleteInfos; private ReentrantReadWriteLock lock; public DeleteHandler() { idToDeleteJob = Maps.newConcurrentMap(); dbToDeleteInfos = Maps.newConcurrentMap(); lock = new ReentrantReadWriteLock(); } private enum CancelType { METADATA_MISSING, TIMEOUT, COMMIT_FAIL, UNKNOWN } public void readLock() { lock.readLock().lock(); } public void readUnlock() { lock.readLock().unlock(); } private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public void process(DeleteStmt stmt) throws DdlException, QueryStateException { String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); List<String> partitionNames = stmt.getPartitionNames(); boolean noPartitionSpecified = partitionNames.isEmpty(); List<Predicate> conditions = stmt.getDeleteConditions(); Database db = Catalog.getCurrentCatalog().getDb(dbName); if (db == null) { throw new DdlException("Db does not exist. name: " + dbName); } DeleteJob deleteJob = null; try { MarkedCountDownLatch<Long, Long> countDownLatch; long transactionId = -1; Table table = null; try { table = db.getTableOrThrowException(tableName, Table.TableType.OLAP); } catch (MetaNotFoundException e) { throw new DdlException(e.getMessage()); } table.readLock(); try { OlapTable olapTable = (OlapTable) table; if (olapTable.getState() != OlapTable.OlapTableState.NORMAL) { } if (noPartitionSpecified) { if (olapTable.getPartitionInfo().getType() == PartitionType.RANGE || olapTable.getPartitionInfo().getType() == PartitionType.LIST) { if (!ConnectContext.get().getSessionVariable().isDeleteWithoutPartition()) { throw new DdlException("This is a range or list partitioned table." 
+ " You should specify partition in delete stmt, or set delete_without_partition to true"); } else { partitionNames.addAll(olapTable.getPartitionNames()); } } else if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) { partitionNames.add(olapTable.getName()); } else { throw new DdlException("Unknown partition type: " + olapTable.getPartitionInfo().getType()); } } Map<Long, Short> partitionReplicaNum = Maps.newHashMap(); List<Partition> partitions = Lists.newArrayList(); for (String partName : partitionNames) { Partition partition = olapTable.getPartition(partName); if (partition == null) { throw new DdlException("Partition does not exist. name: " + partName); } partitions.add(partition); partitionReplicaNum.put(partition.getId(), ((OlapTable) table).getPartitionInfo().getReplicationNum(partition.getId())); } List<String> deleteConditions = Lists.newArrayList(); checkDeleteV2(olapTable, partitions, conditions, deleteConditions); String label = "delete_" + UUID.randomUUID(); long jobId = Catalog.getCurrentCatalog().getNextId(); transactionId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(), Lists.newArrayList(table.getId()), label, null, new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()), TransactionState.LoadJobSourceType.FRONTEND, jobId, Config.stream_load_default_timeout_second); DeleteInfo deleteInfo = new DeleteInfo(db.getId(), olapTable.getId(), tableName, deleteConditions); deleteInfo.setPartitions(noPartitionSpecified, partitions.stream().map(p -> p.getId()).collect(Collectors.toList()), partitionNames); deleteJob = new DeleteJob(jobId, transactionId, label, partitionReplicaNum, deleteInfo); idToDeleteJob.put(deleteJob.getTransactionId(), deleteJob); Catalog.getCurrentGlobalTransactionMgr().getCallbackFactory().addCallback(deleteJob); AgentBatchTask batchTask = new AgentBatchTask(); int totalReplicaNum = 0; for (Partition partition : partitions) { for (MaterializedIndex index : 
partition.getMaterializedIndices(IndexExtState.ALL)) { for (Tablet tablet : index.getTablets()) { totalReplicaNum += tablet.getReplicas().size(); } } } countDownLatch = new MarkedCountDownLatch<Long, Long>(totalReplicaNum); for (Partition partition : partitions) { for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) { long indexId = index.getId(); int schemaHash = olapTable.getSchemaHashByIndexId(indexId); for (Tablet tablet : index.getTablets()) { long tabletId = tablet.getId(); TPushType type = TPushType.DELETE; for (Replica replica : tablet.getReplicas()) { long replicaId = replica.getId(); long backendId = replica.getBackendId(); countDownLatch.addMark(backendId, tabletId); PushTask pushTask = new PushTask(null, replica.getBackendId(), db.getId(), olapTable.getId(), partition.getId(), indexId, tabletId, replicaId, schemaHash, -1, 0, "", -1, 0, -1, type, conditions, true, TPriority.NORMAL, TTaskType.REALTIME_PUSH, transactionId, Catalog.getCurrentGlobalTransactionMgr().getTransactionIDGenerator().getNextTransactionId()); pushTask.setIsSchemaChanging(false); pushTask.setCountDownLatch(countDownLatch); if (AgentTaskQueue.addTask(pushTask)) { batchTask.addTask(pushTask); deleteJob.addPushTask(pushTask); deleteJob.addTablet(tabletId); } } } } } if (batchTask.getTaskNum() > 0) { AgentTaskExecutor.submit(batchTask); } } catch (Throwable t) { LOG.warn("error occurred during delete process", t); if (Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), transactionId) != null) { cancelJob(deleteJob, CancelType.UNKNOWN, t.getMessage()); } throw new DdlException(t.getMessage(), t); } finally { table.readUnlock(); } long timeoutMs = deleteJob.getTimeoutMs(); LOG.info("waiting delete Job finish, signature: {}, timeout: {}", transactionId, timeoutMs); boolean ok = false; try { ok = countDownLatch.await(timeoutMs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { LOG.warn("InterruptedException: ", e); ok = false; } if 
(!ok) { String errMsg = ""; List<Entry<Long, Long>> unfinishedMarks = countDownLatch.getLeftMarks(); List<Entry<Long, Long>> subList = unfinishedMarks.subList(0, Math.min(unfinishedMarks.size(), 5)); if (!subList.isEmpty()) { errMsg = "unfinished replicas [BackendId=TabletId]: " + Joiner.on(", ").join(subList); } LOG.warn(errMsg); try { deleteJob.checkAndUpdateQuorum(); } catch (MetaNotFoundException e) { cancelJob(deleteJob, CancelType.METADATA_MISSING, e.getMessage()); throw new DdlException(e.getMessage(), e); } DeleteState state = deleteJob.getState(); switch (state) { case UN_QUORUM: LOG.warn("delete job timeout: transactionId {}, timeout {}, {}", transactionId, timeoutMs, errMsg); cancelJob(deleteJob, CancelType.TIMEOUT, "delete job timeout"); throw new DdlException("failed to execute delete. transaction id " + transactionId + ", timeout(ms) " + timeoutMs + ", " + errMsg); case QUORUM_FINISHED: case FINISHED: try { long nowQuorumTimeMs = System.currentTimeMillis(); long endQuorumTimeoutMs = nowQuorumTimeMs + timeoutMs / 2; while (deleteJob.getState() == DeleteState.QUORUM_FINISHED && endQuorumTimeoutMs > nowQuorumTimeMs) { deleteJob.checkAndUpdateQuorum(); Thread.sleep(1000); nowQuorumTimeMs = System.currentTimeMillis(); LOG.debug("wait for quorum finished delete job: {}, txn id: {}" + deleteJob.getId(), transactionId); } } catch (MetaNotFoundException e) { cancelJob(deleteJob, CancelType.METADATA_MISSING, e.getMessage()); throw new DdlException(e.getMessage(), e); } catch (InterruptedException e) { cancelJob(deleteJob, CancelType.UNKNOWN, e.getMessage()); throw new DdlException(e.getMessage(), e); } commitJob(deleteJob, db, table, timeoutMs); break; default: Preconditions.checkState(false, "wrong delete job state: " + state.name()); break; } } else { commitJob(deleteJob, db, table, timeoutMs); } } finally { if (!FeConstants.runningUnitTest) { clearJob(deleteJob); } } } private void commitJob(DeleteJob job, Database db, Table table, long timeoutMs) throws 
DdlException, QueryStateException { TransactionStatus status = null; try { unprotectedCommitJob(job, db, table, timeoutMs); status = Catalog.getCurrentGlobalTransactionMgr(). getTransactionState(db.getId(), job.getTransactionId()).getTransactionStatus(); } catch (UserException e) { if (cancelJob(job, CancelType.COMMIT_FAIL, e.getMessage())) { throw new DdlException(e.getMessage(), e); } } StringBuilder sb = new StringBuilder(); sb.append("{'label':'").append(job.getLabel()).append("', 'status':'").append(status.name()); sb.append("', 'txnId':'").append(job.getTransactionId()).append("'"); switch (status) { case COMMITTED: { String errMsg = "delete job is committed but may be taking effect later"; sb.append(", 'err':'").append(errMsg).append("'"); sb.append("}"); throw new QueryStateException(MysqlStateType.OK, sb.toString()); } case VISIBLE: { sb.append("}"); throw new QueryStateException(MysqlStateType.OK, sb.toString()); } default: Preconditions.checkState(false, "wrong transaction status: " + status.name()); break; } }
Hm this is concerning - if we start with 0 shards, the queue is forever doomed to be of size 1 which is probably quite bad for performance.
void start() throws TransientKinesisException { ImmutableMap.Builder<String, ShardRecordsIterator> shardsMap = ImmutableMap.builder(); for (ShardCheckpoint checkpoint : initialCheckpoint) { shardsMap.put(checkpoint.getShardId(), createShardIterator(kinesis, checkpoint)); } shardIteratorsMap.set(shardsMap.build()); if (!shardIteratorsMap.get().isEmpty()) { recordsQueue = new ArrayBlockingQueue<>( queueCapacityPerShard * shardIteratorsMap.get().size()); startReadingShards(shardIteratorsMap.get().values()); } else { recordsQueue = new ArrayBlockingQueue<>(1); } }
}
void start() throws TransientKinesisException { ImmutableMap.Builder<String, ShardRecordsIterator> shardsMap = ImmutableMap.builder(); for (ShardCheckpoint checkpoint : initialCheckpoint) { shardsMap.put(checkpoint.getShardId(), createShardIterator(kinesis, checkpoint)); } shardIteratorsMap.set(shardsMap.build()); if (!shardIteratorsMap.get().isEmpty()) { recordsQueue = new ArrayBlockingQueue<>( queueCapacityPerShard * shardIteratorsMap.get().size()); startReadingShards(shardIteratorsMap.get().values()); } else { recordsQueue = new ArrayBlockingQueue<>(1); } }
class ShardReadersPool { private static final Logger LOG = LoggerFactory.getLogger(ShardReadersPool.class); private static final int DEFAULT_CAPACITY_PER_SHARD = 10_000; /** * Executor service for running the threads that read records from shards handled by this pool. * Each thread runs the {@link ShardReadersPool * handles exactly one shard. */ private final ExecutorService executorService; /** * A Bounded buffer for read records. Records are added to this buffer within * {@link ShardReadersPool * in {@link ShardReadersPool */ private BlockingQueue<KinesisRecord> recordsQueue; /** * A reference to an immutable mapping of {@link ShardRecordsIterator} instances to shard ids. * This map is replaced with a new one when resharding operation on any handled shard occurs. */ private final AtomicReference<ImmutableMap<String, ShardRecordsIterator>> shardIteratorsMap; /** * A map for keeping the current number of records stored in a buffer per shard. */ private final ConcurrentMap<String, AtomicInteger> numberOfRecordsInAQueueByShard; private final SimplifiedKinesisClient kinesis; private final KinesisReaderCheckpoint initialCheckpoint; private final int queueCapacityPerShard; private final AtomicBoolean poolOpened = new AtomicBoolean(true); ShardReadersPool(SimplifiedKinesisClient kinesis, KinesisReaderCheckpoint initialCheckpoint) { this(kinesis, initialCheckpoint, DEFAULT_CAPACITY_PER_SHARD); } ShardReadersPool(SimplifiedKinesisClient kinesis, KinesisReaderCheckpoint initialCheckpoint, int queueCapacityPerShard) { this.kinesis = kinesis; this.initialCheckpoint = initialCheckpoint; this.queueCapacityPerShard = queueCapacityPerShard; this.executorService = Executors.newCachedThreadPool(); this.numberOfRecordsInAQueueByShard = new ConcurrentHashMap<>(); this.shardIteratorsMap = new AtomicReference<>(); } private void startReadingShards(Iterable<ShardRecordsIterator> shardRecordsIterators) { for (final ShardRecordsIterator recordsIterator : shardRecordsIterators) { 
numberOfRecordsInAQueueByShard.put(recordsIterator.getShardId(), new AtomicInteger()); executorService.submit(new Runnable() { @Override public void run() { readLoop(recordsIterator); } }); } } private void readLoop(ShardRecordsIterator shardRecordsIterator) { while (poolOpened.get()) { try { List<KinesisRecord> kinesisRecords; try { kinesisRecords = shardRecordsIterator.readNextBatch(); } catch (KinesisShardClosedException e) { LOG.info("Shard iterator for {} shard is closed, finishing the read loop", shardRecordsIterator.getShardId(), e); waitUntilAllShardRecordsRead(shardRecordsIterator); readFromSuccessiveShards(shardRecordsIterator); break; } for (KinesisRecord kinesisRecord : kinesisRecords) { recordsQueue.put(kinesisRecord); numberOfRecordsInAQueueByShard.get(kinesisRecord.getShardId()).incrementAndGet(); } } catch (TransientKinesisException e) { LOG.warn("Transient exception occurred.", e); } catch (InterruptedException e) { LOG.warn("Thread was interrupted, finishing the read loop", e); break; } catch (Throwable e) { LOG.error("Unexpected exception occurred", e); } } LOG.info("Kinesis Shard read loop has finished"); } CustomOptional<KinesisRecord> nextRecord() { try { KinesisRecord record = recordsQueue.poll(1, TimeUnit.SECONDS); if (record == null) { return CustomOptional.absent(); } shardIteratorsMap.get().get(record.getShardId()).ackRecord(record); numberOfRecordsInAQueueByShard.get(record.getShardId()).decrementAndGet(); return CustomOptional.of(record); } catch (InterruptedException e) { LOG.warn("Interrupted while waiting for KinesisRecord from the buffer"); return CustomOptional.absent(); } } void stop() { LOG.info("Closing shard iterators pool"); poolOpened.set(false); executorService.shutdownNow(); boolean isShutdown = false; int attemptsLeft = 3; while (!isShutdown && attemptsLeft-- > 0) { try { isShutdown = executorService.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { LOG.error("Interrupted while waiting for the 
executor service to shutdown"); throw new RuntimeException(e); } if (!isShutdown && attemptsLeft > 0) { LOG.warn("Executor service is taking long time to shutdown, will retry. {} attempts left", attemptsLeft); } } } boolean allShardsUpToDate() { boolean shardsUpToDate = true; ImmutableMap<String, ShardRecordsIterator> currentShardIterators = shardIteratorsMap.get(); for (ShardRecordsIterator shardRecordsIterator : currentShardIterators.values()) { shardsUpToDate &= shardRecordsIterator.isUpToDate(); } return shardsUpToDate; } KinesisReaderCheckpoint getCheckpointMark() { ImmutableMap<String, ShardRecordsIterator> currentShardIterators = shardIteratorsMap.get(); return new KinesisReaderCheckpoint(transform(currentShardIterators.values(), new Function<ShardRecordsIterator, ShardCheckpoint>() { @Override public ShardCheckpoint apply(ShardRecordsIterator shardRecordsIterator) { checkArgument(shardRecordsIterator != null, "shardRecordsIterator can not be null"); return shardRecordsIterator.getCheckpoint(); } })); } ShardRecordsIterator createShardIterator(SimplifiedKinesisClient kinesis, ShardCheckpoint checkpoint) throws TransientKinesisException { return new ShardRecordsIterator(checkpoint, kinesis); } /** * Waits until all records read from given shardRecordsIterator are taken from * {@link * Uses {@link */ private void waitUntilAllShardRecordsRead(ShardRecordsIterator shardRecordsIterator) throws InterruptedException { while (!allShardRecordsRead(shardRecordsIterator)) { Thread.sleep(TimeUnit.SECONDS.toMillis(1)); } } private boolean allShardRecordsRead(final ShardRecordsIterator shardRecordsIterator) { return numberOfRecordsInAQueueByShard.get(shardRecordsIterator.getShardId()).get() == 0; } /** * <p> * Tries to find successors of a given shard and start reading them. 
Each closed shard can have * 0, 1 or 2 successors * <ul> * <li>0 successors - when shard was merged with another shard and this one is considered * adjacent by merge operation</li> * <li>1 successor - when shard was merged with another shard and this one is considered a * parent by merge operation</li> * <li>2 successors - when shard was split into two shards</li> * </ul> * </p> * <p> * Once shard successors are established, the transition to reading new shards can begin. * During this operation, the immutable {@link ShardReadersPool * is replaced with a new one holding references to {@link ShardRecordsIterator} instances for * open shards only. Potentially there might be more shard iterators closing at the same time so * {@link ShardReadersPool * the updates. Then, the counter for already closed shard is removed from * {@link ShardReadersPool * </p> * <p> * Finally when update is finished, new threads are spawned for reading the successive shards. * The thread that handled reading from already closed shard can finally complete. 
* </p> */ private void readFromSuccessiveShards(final ShardRecordsIterator closedShardIterator) throws TransientKinesisException { List<ShardRecordsIterator> successiveShardRecordIterators = closedShardIterator .findSuccessiveShardRecordIterators(); ImmutableMap<String, ShardRecordsIterator> current; ImmutableMap<String, ShardRecordsIterator> updated; do { current = shardIteratorsMap.get(); updated = createMapWithSuccessiveShards(current, closedShardIterator, successiveShardRecordIterators); } while (!shardIteratorsMap.compareAndSet(current, updated)); numberOfRecordsInAQueueByShard.remove(closedShardIterator.getShardId()); startReadingShards(successiveShardRecordIterators); } private ImmutableMap<String, ShardRecordsIterator> createMapWithSuccessiveShards( ImmutableMap<String, ShardRecordsIterator> current, ShardRecordsIterator closedShardIterator, List<ShardRecordsIterator> successiveShardRecordIterators) throws TransientKinesisException { ImmutableMap.Builder<String, ShardRecordsIterator> shardsMap = ImmutableMap.builder(); Iterable<ShardRecordsIterator> allShards = Iterables .concat(current.values(), successiveShardRecordIterators); for (ShardRecordsIterator iterator : allShards) { if (!closedShardIterator.getShardId().equals(iterator.getShardId())) { shardsMap.put(iterator.getShardId(), iterator); } } return shardsMap.build(); } }
class ShardReadersPool { private static final Logger LOG = LoggerFactory.getLogger(ShardReadersPool.class); private static final int DEFAULT_CAPACITY_PER_SHARD = 10_000; /** * Executor service for running the threads that read records from shards handled by this pool. * Each thread runs the {@link ShardReadersPool * handles exactly one shard. */ private final ExecutorService executorService; /** * A Bounded buffer for read records. Records are added to this buffer within * {@link ShardReadersPool * in {@link ShardReadersPool */ private BlockingQueue<KinesisRecord> recordsQueue; /** * A reference to an immutable mapping of {@link ShardRecordsIterator} instances to shard ids. * This map is replaced with a new one when resharding operation on any handled shard occurs. */ private final AtomicReference<ImmutableMap<String, ShardRecordsIterator>> shardIteratorsMap; /** * A map for keeping the current number of records stored in a buffer per shard. */ private final ConcurrentMap<String, AtomicInteger> numberOfRecordsInAQueueByShard; private final SimplifiedKinesisClient kinesis; private final KinesisReaderCheckpoint initialCheckpoint; private final int queueCapacityPerShard; private final AtomicBoolean poolOpened = new AtomicBoolean(true); ShardReadersPool(SimplifiedKinesisClient kinesis, KinesisReaderCheckpoint initialCheckpoint) { this(kinesis, initialCheckpoint, DEFAULT_CAPACITY_PER_SHARD); } ShardReadersPool(SimplifiedKinesisClient kinesis, KinesisReaderCheckpoint initialCheckpoint, int queueCapacityPerShard) { this.kinesis = kinesis; this.initialCheckpoint = initialCheckpoint; this.queueCapacityPerShard = queueCapacityPerShard; this.executorService = Executors.newCachedThreadPool(); this.numberOfRecordsInAQueueByShard = new ConcurrentHashMap<>(); this.shardIteratorsMap = new AtomicReference<>(); } private void startReadingShards(Iterable<ShardRecordsIterator> shardRecordsIterators) { for (final ShardRecordsIterator recordsIterator : shardRecordsIterators) { 
numberOfRecordsInAQueueByShard.put(recordsIterator.getShardId(), new AtomicInteger()); executorService.submit(() -> readLoop(recordsIterator)); } } private void readLoop(ShardRecordsIterator shardRecordsIterator) { while (poolOpened.get()) { try { List<KinesisRecord> kinesisRecords; try { kinesisRecords = shardRecordsIterator.readNextBatch(); } catch (KinesisShardClosedException e) { LOG.info("Shard iterator for {} shard is closed, finishing the read loop", shardRecordsIterator.getShardId(), e); waitUntilAllShardRecordsRead(shardRecordsIterator); readFromSuccessiveShards(shardRecordsIterator); break; } for (KinesisRecord kinesisRecord : kinesisRecords) { recordsQueue.put(kinesisRecord); numberOfRecordsInAQueueByShard.get(kinesisRecord.getShardId()).incrementAndGet(); } } catch (TransientKinesisException e) { LOG.warn("Transient exception occurred.", e); } catch (InterruptedException e) { LOG.warn("Thread was interrupted, finishing the read loop", e); break; } catch (Throwable e) { LOG.error("Unexpected exception occurred", e); } } LOG.info("Kinesis Shard read loop has finished"); } CustomOptional<KinesisRecord> nextRecord() { try { KinesisRecord record = recordsQueue.poll(1, TimeUnit.SECONDS); if (record == null) { return CustomOptional.absent(); } shardIteratorsMap.get().get(record.getShardId()).ackRecord(record); numberOfRecordsInAQueueByShard.get(record.getShardId()).decrementAndGet(); return CustomOptional.of(record); } catch (InterruptedException e) { LOG.warn("Interrupted while waiting for KinesisRecord from the buffer"); return CustomOptional.absent(); } } void stop() { LOG.info("Closing shard iterators pool"); poolOpened.set(false); executorService.shutdownNow(); boolean isShutdown = false; int attemptsLeft = 3; while (!isShutdown && attemptsLeft-- > 0) { try { isShutdown = executorService.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { LOG.error("Interrupted while waiting for the executor service to shutdown"); throw new 
RuntimeException(e); } if (!isShutdown && attemptsLeft > 0) { LOG.warn("Executor service is taking long time to shutdown, will retry. {} attempts left", attemptsLeft); } } } boolean allShardsUpToDate() { boolean shardsUpToDate = true; ImmutableMap<String, ShardRecordsIterator> currentShardIterators = shardIteratorsMap.get(); for (ShardRecordsIterator shardRecordsIterator : currentShardIterators.values()) { shardsUpToDate &= shardRecordsIterator.isUpToDate(); } return shardsUpToDate; } KinesisReaderCheckpoint getCheckpointMark() { ImmutableMap<String, ShardRecordsIterator> currentShardIterators = shardIteratorsMap.get(); return new KinesisReaderCheckpoint( currentShardIterators.values().stream().map(shardRecordsIterator -> { checkArgument(shardRecordsIterator != null, "shardRecordsIterator can not be null"); return shardRecordsIterator.getCheckpoint(); }).collect(Collectors.toList())); } ShardRecordsIterator createShardIterator(SimplifiedKinesisClient kinesis, ShardCheckpoint checkpoint) throws TransientKinesisException { return new ShardRecordsIterator(checkpoint, kinesis); } /** * Waits until all records read from given shardRecordsIterator are taken from * {@link * Uses {@link */ private void waitUntilAllShardRecordsRead(ShardRecordsIterator shardRecordsIterator) throws InterruptedException { AtomicInteger numberOfShardRecordsInAQueue = numberOfRecordsInAQueueByShard .get(shardRecordsIterator.getShardId()); while (!(numberOfShardRecordsInAQueue.get() == 0)) { Thread.sleep(TimeUnit.SECONDS.toMillis(1)); } } /** * <p> * Tries to find successors of a given shard and start reading them. 
Each closed shard can have * 0, 1 or 2 successors * <ul> * <li>0 successors - when shard was merged with another shard and this one is considered * adjacent by merge operation</li> * <li>1 successor - when shard was merged with another shard and this one is considered a * parent by merge operation</li> * <li>2 successors - when shard was split into two shards</li> * </ul> * </p> * <p> * Once shard successors are established, the transition to reading new shards can begin. * During this operation, the immutable {@link ShardReadersPool * is replaced with a new one holding references to {@link ShardRecordsIterator} instances for * open shards only. Potentially there might be more shard iterators closing at the same time so * {@link ShardReadersPool * the updates. Then, the counter for already closed shard is removed from * {@link ShardReadersPool * </p> * <p> * Finally when update is finished, new threads are spawned for reading the successive shards. * The thread that handled reading from already closed shard can finally complete. 
* </p> */ private void readFromSuccessiveShards(final ShardRecordsIterator closedShardIterator) throws TransientKinesisException { List<ShardRecordsIterator> successiveShardRecordIterators = closedShardIterator .findSuccessiveShardRecordIterators(); ImmutableMap<String, ShardRecordsIterator> current; ImmutableMap<String, ShardRecordsIterator> updated; do { current = shardIteratorsMap.get(); updated = createMapWithSuccessiveShards(current, closedShardIterator, successiveShardRecordIterators); } while (!shardIteratorsMap.compareAndSet(current, updated)); numberOfRecordsInAQueueByShard.remove(closedShardIterator.getShardId()); startReadingShards(successiveShardRecordIterators); } private ImmutableMap<String, ShardRecordsIterator> createMapWithSuccessiveShards( ImmutableMap<String, ShardRecordsIterator> current, ShardRecordsIterator closedShardIterator, List<ShardRecordsIterator> successiveShardRecordIterators) throws TransientKinesisException { ImmutableMap.Builder<String, ShardRecordsIterator> shardsMap = ImmutableMap.builder(); Iterable<ShardRecordsIterator> allShards = Iterables .concat(current.values(), successiveShardRecordIterators); for (ShardRecordsIterator iterator : allShards) { if (!closedShardIterator.getShardId().equals(iterator.getShardId())) { shardsMap.put(iterator.getShardId(), iterator); } } return shardsMap.build(); } }
Ok, I changed a total time of retrying to 1 minute. I believe it should be quite enough for most of the cases as a default value.
private void flush(int numMax) throws InterruptedException, IOException { int retries = spec.getRetries(); int numOutstandingRecords = producer.getOutstandingRecordsCount(); while (numOutstandingRecords > numMax && retries-- > 0) { producer.flush(); Thread.sleep(1000); numOutstandingRecords = producer.getOutstandingRecordsCount(); } if (numOutstandingRecords > numMax) { String message = String.format( "Number of outstanding records [%d] are greater than required [%d].", numOutstandingRecords, numMax); LOG.error(message); throw new IOException(message); } }
}
private void flush(int numMax) throws InterruptedException, IOException { int retries = spec.getRetries(); int numOutstandingRecords = producer.getOutstandingRecordsCount(); int retryTimeout = 1000; while (numOutstandingRecords > numMax && retries-- > 0) { producer.flush(); Thread.sleep(retryTimeout); numOutstandingRecords = producer.getOutstandingRecordsCount(); retryTimeout *= 2; } if (numOutstandingRecords > numMax) { String message = String.format( "After [%d] retries, number of outstanding records [%d] is still greater than " + "required [%d].", spec.getRetries(), numOutstandingRecords, numMax); LOG.error(message); throw new IOException(message); } }
class KinesisWriterFn extends DoFn<byte[], Void> { private static final int MAX_NUM_RECORDS = 100 * 1000; private static final int MAX_NUM_FAILURES = 10; private final KinesisIO.Write spec; private transient IKinesisProducer producer; private transient KinesisPartitioner partitioner; private transient LinkedBlockingDeque<KinesisWriteException> failures; public KinesisWriterFn(KinesisIO.Write spec) { this.spec = spec; } @Setup public void setup() throws Exception { checkArgument( streamExists(spec.getAWSClientsProvider().getKinesisClient(), spec.getStreamName()), "Stream %s does not exist", spec.getStreamName()); Properties props = spec.getProducerProperties(); if (props == null) { props = new Properties(); } KinesisProducerConfiguration config = KinesisProducerConfiguration.fromProperties(props); config.setCredentialsRefreshDelay(100); producer = spec.getAWSClientsProvider().createKinesisProducer(config); if (spec.getPartitioner() != null) { partitioner = spec.getPartitioner(); } /** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */ failures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES); } /** * It adds a record asynchronously which then should be delivered by Kinesis producer in * background (Kinesis producer forks native processes to do this job). * * <p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL * supports two types of batching - aggregation and collection - and they can be configured by * producer properties. 
* * <p>More details can be found here: * <a href="https: * <a href="https: */ @ProcessElement public void processElement(ProcessContext c) throws Exception { checkForFailures(); flush(MAX_NUM_RECORDS); ByteBuffer data = ByteBuffer.wrap(c.element()); String partitionKey = spec.getPartitionKey(); String explicitHashKey = null; if (partitioner != null) { partitionKey = partitioner.getPartitionKey(c.element()); explicitHashKey = partitioner.getExplicitHashKey(c.element()); } ListenableFuture<UserRecordResult> f = producer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data); Futures.addCallback(f, new UserRecordResultFutureCallback()); } @FinishBundle public void finishBundle() throws Exception { checkForFailures(); flushAll(); } @Teardown public void tearDown() throws Exception { if (producer != null) { producer.destroy(); producer = null; } } /** * Flush outstanding records until the total number will be less than required or * number of retries will be exhausted. */ private void flushAll() throws InterruptedException, IOException { flush(0); } /** * If any write has asynchronously failed, fail the bundle with a useful error. */ private void checkForFailures() throws IOException { if (failures.isEmpty()) { return; } StringBuilder logEntry = new StringBuilder(); int i = 0; while (!failures.isEmpty()) { i++; KinesisWriteException exc = failures.remove(); logEntry.append("\n").append(exc.getMessage()); Throwable cause = exc.getCause(); if (cause != null) { logEntry.append(": ").append(cause.getMessage()); if (cause instanceof UserRecordFailedException) { List<Attempt> attempts = ((UserRecordFailedException) cause).getResult() .getAttempts(); for (Attempt attempt : attempts) { if (attempt.getErrorMessage() != null) { logEntry.append("\n").append(attempt.getErrorMessage()); } } } } } failures.clear(); String message = String.format( "Some errors occurred writing to Kinesis. 
First %d errors: %s", i, logEntry.toString()); LOG.error(message); throw new IOException(message); } private class UserRecordResultFutureCallback implements FutureCallback<UserRecordResult> { @Override public void onFailure(Throwable cause) { failures.offer(new KinesisWriteException(cause)); } @Override public void onSuccess(UserRecordResult result) { if (!result.isSuccessful()) { failures.add(new KinesisWriteException("Put record was not successful.")); } } } }
class KinesisWriterFn extends DoFn<byte[], Void> { private static final int MAX_NUM_RECORDS = 100 * 1000; private static final int MAX_NUM_FAILURES = 10; private final KinesisIO.Write spec; private transient IKinesisProducer producer; private transient KinesisPartitioner partitioner; private transient LinkedBlockingDeque<KinesisWriteException> failures; public KinesisWriterFn(KinesisIO.Write spec) { this.spec = spec; } @Setup public void setup() throws Exception { checkArgument( streamExists(spec.getAWSClientsProvider().getKinesisClient(), spec.getStreamName()), "Stream %s does not exist", spec.getStreamName()); Properties props = spec.getProducerProperties(); if (props == null) { props = new Properties(); } KinesisProducerConfiguration config = KinesisProducerConfiguration.fromProperties(props); config.setCredentialsRefreshDelay(100); producer = spec.getAWSClientsProvider().createKinesisProducer(config); if (spec.getPartitioner() != null) { partitioner = spec.getPartitioner(); } /** Keep only the first {@link MAX_NUM_FAILURES} occurred exceptions */ failures = new LinkedBlockingDeque<>(MAX_NUM_FAILURES); } /** * It adds a record asynchronously which then should be delivered by Kinesis producer in * background (Kinesis producer forks native processes to do this job). * * <p>The records can be batched and then they will be sent in one HTTP request. Amazon KPL * supports two types of batching - aggregation and collection - and they can be configured by * producer properties. 
* * <p>More details can be found here: * <a href="https: * <a href="https: */ @ProcessElement public void processElement(ProcessContext c) throws Exception { checkForFailures(); flush(MAX_NUM_RECORDS); ByteBuffer data = ByteBuffer.wrap(c.element()); String partitionKey = spec.getPartitionKey(); String explicitHashKey = null; if (partitioner != null) { partitionKey = partitioner.getPartitionKey(c.element()); explicitHashKey = partitioner.getExplicitHashKey(c.element()); } ListenableFuture<UserRecordResult> f = producer.addUserRecord(spec.getStreamName(), partitionKey, explicitHashKey, data); Futures.addCallback(f, new UserRecordResultFutureCallback()); } @FinishBundle public void finishBundle() throws Exception { flushAll(); checkForFailures(); } @Teardown public void tearDown() throws Exception { if (producer != null) { producer.destroy(); producer = null; } } /** * Flush outstanding records until the total number will be less than required or * the number of retries will be exhausted. The retry timeout starts from 1 second and it * doubles on every iteration. */ private void flushAll() throws InterruptedException, IOException { flush(0); } /** * If any write has asynchronously failed, fail the bundle with a useful error. */ private void checkForFailures() throws IOException { if (failures.isEmpty()) { return; } StringBuilder logEntry = new StringBuilder(); int i = 0; while (!failures.isEmpty()) { i++; KinesisWriteException exc = failures.remove(); logEntry.append("\n").append(exc.getMessage()); Throwable cause = exc.getCause(); if (cause != null) { logEntry.append(": ").append(cause.getMessage()); if (cause instanceof UserRecordFailedException) { List<Attempt> attempts = ((UserRecordFailedException) cause).getResult() .getAttempts(); for (Attempt attempt : attempts) { if (attempt.getErrorMessage() != null) { logEntry.append("\n").append(attempt.getErrorMessage()); } } } } } failures.clear(); String message = String.format( "Some errors occurred writing to Kinesis. 
First %d errors: %s", i, logEntry.toString()); throw new IOException(message); } private class UserRecordResultFutureCallback implements FutureCallback<UserRecordResult> { @Override public void onFailure(Throwable cause) { failures.offer(new KinesisWriteException(cause)); } @Override public void onSuccess(UserRecordResult result) { if (!result.isSuccessful()) { failures.offer(new KinesisWriteException("Put record was not successful.", new UserRecordFailedException(result))); } } } }
you are right, i will fix it.
private boolean executeStatement(String statement, ExecutionMode executionMode) { try { final Optional<Operation> operation = parseCommand(statement); operation.ifPresent(op -> callOperation(op, executionMode)); } catch (SqlExecutionException e) { Throwable t = ExceptionUtils.getRootCause(e); if (t instanceof InterruptedException) { printExecutionException(t); } else { printExecutionException(e); } return false; } return true; }
}
private boolean executeStatement(String statement, ExecutionMode executionMode) { try { final Optional<Operation> operation = parseCommand(statement); operation.ifPresent( op -> { final Thread thread = Thread.currentThread(); final Terminal.SignalHandler previousHandler = terminal.handle( Terminal.Signal.INT, (signal) -> thread.interrupt()); try { callOperation(op, executionMode); } finally { terminal.handle(Terminal.Signal.INT, previousHandler); } }); } catch (SqlExecutionException e) { printExecutionException(e); return false; } return true; }
class CliClient implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(CliClient.class); public static final Supplier<Terminal> DEFAULT_TERMINAL_FACTORY = TerminalUtils::createDefaultTerminal; private final Executor executor; private final String sessionId; private final Path historyFilePath; private final String prompt; private final @Nullable MaskingCallback inputTransformer; private final Supplier<Terminal> terminalFactory; private Terminal terminal; private boolean isRunning; private boolean isStatementSetMode; private List<ModifyOperation> statementSetOperations; private static final int PLAIN_TERMINAL_WIDTH = 80; private static final int PLAIN_TERMINAL_HEIGHT = 30; /** * Creates a CLI instance with a custom terminal. Make sure to close the CLI instance afterwards * using {@link */ @VisibleForTesting public CliClient( Supplier<Terminal> terminalFactory, String sessionId, Executor executor, Path historyFilePath, @Nullable MaskingCallback inputTransformer) { this.terminalFactory = terminalFactory; this.sessionId = sessionId; this.executor = executor; this.inputTransformer = inputTransformer; this.historyFilePath = historyFilePath; prompt = new AttributedStringBuilder() .style(AttributedStyle.DEFAULT.foreground(AttributedStyle.GREEN)) .append("Flink SQL") .style(AttributedStyle.DEFAULT) .append("> ") .toAnsi(); } /** * Creates a CLI instance with a prepared terminal. 
Make sure to close the CLI instance * afterwards using {@link */ public CliClient( Supplier<Terminal> terminalFactory, String sessionId, Executor executor, Path historyFilePath) { this(terminalFactory, sessionId, executor, historyFilePath, null); } public Terminal getTerminal() { return terminal; } public String getSessionId() { return this.sessionId; } public void clearTerminal() { if (isPlainTerminal()) { for (int i = 0; i < 200; i++) { terminal.writer().println(); } } else { terminal.puts(InfoCmp.Capability.clear_screen); } } public boolean isPlainTerminal() { return terminal.getWidth() == 0 && terminal.getHeight() == 0; } public int getWidth() { if (isPlainTerminal()) { return PLAIN_TERMINAL_WIDTH; } return terminal.getWidth(); } public int getHeight() { if (isPlainTerminal()) { return PLAIN_TERMINAL_HEIGHT; } return terminal.getHeight(); } public Executor getExecutor() { return executor; } /** Closes the CLI instance. */ public void close() { if (terminal != null) { closeTerminal(); } } /** Opens the interactive CLI shell. */ public void executeInInteractiveMode() { try { terminal = terminalFactory.get(); executeInteractive(); } finally { closeTerminal(); } } public void executeInNonInteractiveMode(String content) { try { terminal = terminalFactory.get(); executeFile(content, ExecutionMode.NON_INTERACTIVE_EXECUTION); } finally { closeTerminal(); } } public boolean executeInitialization(String content) { try { OutputStream outputStream = new ByteArrayOutputStream(256); terminal = TerminalUtils.createDumbTerminal(outputStream); boolean success = executeFile(content, ExecutionMode.INITIALIZATION); LOG.info(outputStream.toString()); return success; } finally { closeTerminal(); } } enum ExecutionMode { INTERACTIVE_EXECUTION, NON_INTERACTIVE_EXECUTION, INITIALIZATION } /** * Execute statement from the user input and prints status information and/or errors on the * terminal. 
*/ private void executeInteractive() { isRunning = true; LineReader lineReader = createLineReader(terminal); terminal.writer().println(); terminal.writer().flush(); terminal.writer().append(CliStrings.MESSAGE_WELCOME); while (isRunning) { terminal.writer().append("\n"); terminal.flush(); String line; try { line = lineReader.readLine(prompt, null, inputTransformer, null); } catch (UserInterruptException e) { continue; } catch (EndOfFileException | IOError e) { break; } catch (Throwable t) { throw new SqlClientException("Could not read from command line.", t); } if (line == null) { continue; } executeStatement(line, ExecutionMode.INTERACTIVE_EXECUTION); } } /** * Execute content from Sql file and prints status information and/or errors on the terminal. * * @param content SQL file content */ private boolean executeFile(String content, ExecutionMode mode) { terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EXECUTE_FILE).toAnsi()); for (String statement : CliStatementSplitter.splitContent(content)) { terminal.writer() .println(new AttributedString(String.format("%s%s", prompt, statement))); terminal.flush(); if (!executeStatement(statement, mode)) { return false; } } return true; } private void validate(Operation operation, ExecutionMode executionMode) { if (executionMode.equals(ExecutionMode.INITIALIZATION)) { if (!(operation instanceof SetOperation) && !(operation instanceof ResetOperation) && !(operation instanceof CreateOperation) && !(operation instanceof DropOperation) && !(operation instanceof UseOperation) && !(operation instanceof AlterOperation) && !(operation instanceof LoadModuleOperation) && !(operation instanceof UnloadModuleOperation) && !(operation instanceof AddJarOperation) && !(operation instanceof RemoveJarOperation)) { throw new SqlExecutionException( "Unsupported operation in sql init file: " + operation.asSummaryString()); } } else if (executionMode.equals(ExecutionMode.NON_INTERACTIVE_EXECUTION)) { ResultMode mode = 
executor.getSessionConfig(sessionId).get(EXECUTION_RESULT_MODE); if (operation instanceof QueryOperation && !mode.equals(TABLEAU)) { throw new SqlExecutionException( String.format( "In non-interactive mode, it only supports to use %s as value of %s when execute query. Please add 'SET %s=%s;' in the sql file.", TABLEAU, EXECUTION_RESULT_MODE.key(), EXECUTION_RESULT_MODE.key(), TABLEAU)); } } if (isStatementSetMode) { if (!(operation instanceof CatalogSinkModifyOperation || operation instanceof EndStatementSetOperation)) { throw new SqlExecutionException(MESSAGE_STATEMENT_SET_SQL_EXECUTION_ERROR); } } } private Optional<Operation> parseCommand(String stmt) { stmt = stmt.trim(); if (stmt.endsWith(";")) { stmt = stmt.substring(0, stmt.length() - 1).trim(); } if (stmt.trim().isEmpty()) { return Optional.empty(); } Operation operation = executor.parseStatement(sessionId, stmt); return Optional.of(operation); } private void callOperation(Operation operation, ExecutionMode mode) { validate(operation, mode); final Thread thread = Thread.currentThread(); final Terminal.SignalHandler previousHandler = terminal.handle(Terminal.Signal.INT, (signal) -> thread.interrupt()); try { if (operation instanceof QuitOperation) { callQuit(); } else if (operation instanceof ClearOperation) { callClear(); } else if (operation instanceof HelpOperation) { callHelp(); } else if (operation instanceof SetOperation) { callSet((SetOperation) operation); } else if (operation instanceof ResetOperation) { callReset((ResetOperation) operation); } else if (operation instanceof CatalogSinkModifyOperation) { callInsert((CatalogSinkModifyOperation) operation); } else if (operation instanceof QueryOperation) { callSelect((QueryOperation) operation); } else if (operation instanceof ExplainOperation) { callExplain((ExplainOperation) operation); } else if (operation instanceof BeginStatementSetOperation) { callBeginStatementSet(); } else if (operation instanceof EndStatementSetOperation) { 
callEndStatementSet(); } else if (operation instanceof AddJarOperation) { callAddJar((AddJarOperation) operation); } else if (operation instanceof RemoveJarOperation) { callRemoveJar((RemoveJarOperation) operation); } else if (operation instanceof ShowJarsOperation) { callShowJars(); } else if (operation instanceof ShowCreateTableOperation) { callShowCreateTable((ShowCreateTableOperation) operation); } else { executeOperation(operation); } } finally { terminal.handle(Terminal.Signal.INT, previousHandler); } } private void callAddJar(AddJarOperation operation) { String jarPath = operation.getPath(); executor.addJar(sessionId, jarPath); printInfo(CliStrings.MESSAGE_ADD_JAR_STATEMENT); } private void callRemoveJar(RemoveJarOperation operation) { String jarPath = operation.getPath(); executor.removeJar(sessionId, jarPath); printInfo(CliStrings.MESSAGE_REMOVE_JAR_STATEMENT); } private void callShowJars() { List<String> jars = executor.listJars(sessionId); if (CollectionUtils.isEmpty(jars)) { terminal.writer().println("Empty set"); } else { jars.forEach(jar -> terminal.writer().println(jar)); } terminal.flush(); } private void callQuit() { printInfo(CliStrings.MESSAGE_QUIT); isRunning = false; } private void callClear() { clearTerminal(); } private void callReset(ResetOperation resetOperation) { if (!resetOperation.getKey().isPresent()) { executor.resetSessionProperties(sessionId); printInfo(CliStrings.MESSAGE_RESET); } else { String key = resetOperation.getKey().get(); executor.resetSessionProperty(sessionId, key); printInfo(MESSAGE_RESET_KEY); } } private void callSet(SetOperation setOperation) { if (setOperation.getKey().isPresent() && setOperation.getValue().isPresent()) { String key = setOperation.getKey().get().trim(); String value = setOperation.getValue().get().trim(); executor.setSessionProperty(sessionId, key, value); printInfo(MESSAGE_SET_KEY); } else { final Map<String, String> properties = executor.getSessionConfigMap(sessionId); if (properties.isEmpty()) { 
terminal.writer() .println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi()); } else { List<String> prettyEntries = new ArrayList<>(); for (String key : properties.keySet()) { prettyEntries.add( String.format( "'%s' = '%s'", EncodingUtils.escapeSingleQuotes(key), EncodingUtils.escapeSingleQuotes(properties.get(key)))); } prettyEntries.sort(String::compareTo); prettyEntries.forEach(entry -> terminal.writer().println(entry)); } terminal.flush(); } } private void callHelp() { terminal.writer().println(CliStrings.MESSAGE_HELP); terminal.flush(); } private void callSelect(QueryOperation operation) { final ResultDescriptor resultDesc = executor.executeQuery(sessionId, operation); if (resultDesc.isTableauMode()) { try (CliTableauResultView tableauResultView = new CliTableauResultView(terminal, executor, sessionId, resultDesc)) { tableauResultView.displayResults(); } } else { final CliResultView<?> view; if (resultDesc.isMaterialized()) { view = new CliTableResultView(this, resultDesc); } else { view = new CliChangelogResultView(this, resultDesc); } view.open(); printInfo(CliStrings.MESSAGE_RESULT_QUIT); } } private void callInsert(CatalogSinkModifyOperation operation) { if (isStatementSetMode) { statementSetOperations.add(operation); printInfo(CliStrings.MESSAGE_ADD_STATEMENT_TO_STATEMENT_SET); } else { callInserts(Collections.singletonList(operation)); } } private void callInserts(List<ModifyOperation> operations) { printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT); boolean sync = executor.getSessionConfig(sessionId).get(TABLE_DML_SYNC); if (sync) { printInfo(MESSAGE_WAIT_EXECUTE); } TableResult tableResult = executor.executeModifyOperations(sessionId, operations); checkState(tableResult.getJobClient().isPresent()); if (sync) { terminal.writer().println(CliStrings.messageInfo(MESSAGE_FINISH_STATEMENT).toAnsi()); } else { terminal.writer().println(CliStrings.messageInfo(MESSAGE_STATEMENT_SUBMITTED).toAnsi()); terminal.writer() .println( String.format( "Job ID: 
%s\n", tableResult.getJobClient().get().getJobID().toString())); } terminal.flush(); } public void callExplain(ExplainOperation operation) { printRawContent(operation); } public void callShowCreateTable(ShowCreateTableOperation operation) { printRawContent(operation); } public void printRawContent(Operation operation) { TableResult tableResult = executor.executeOperation(sessionId, operation); final String explanation = Objects.requireNonNull(tableResult.collect().next().getField(0)).toString(); terminal.writer().println(explanation); terminal.flush(); } private void callBeginStatementSet() { isStatementSetMode = true; statementSetOperations = new ArrayList<>(); printInfo(CliStrings.MESSAGE_BEGIN_STATEMENT_SET); } private void callEndStatementSet() { if (isStatementSetMode) { isStatementSetMode = false; if (!statementSetOperations.isEmpty()) { callInserts(statementSetOperations); } else { printInfo(CliStrings.MESSAGE_NO_STATEMENT_IN_STATEMENT_SET); } statementSetOperations = null; } else { throw new SqlExecutionException(MESSAGE_STATEMENT_SET_END_CALL_ERROR); } } private void executeOperation(Operation operation) { TableResult result = executor.executeOperation(sessionId, operation); if (TABLE_RESULT_OK == result) { printInfo(MESSAGE_EXECUTE_STATEMENT); } else { PrintUtils.printAsTableauForm( result.getResolvedSchema(), result.collect(), terminal.writer(), Integer.MAX_VALUE, "", false, false, CliUtils.getSessionTimeZone(executor.getSessionConfig(sessionId))); terminal.flush(); } } private void printExecutionException(Throwable t) { final String errorMessage = CliStrings.MESSAGE_SQL_EXECUTION_ERROR; LOG.warn(errorMessage, t); boolean isVerbose = executor.getSessionConfig(sessionId).get(SqlClientOptions.VERBOSE); terminal.writer().println(CliStrings.messageError(errorMessage, t, isVerbose).toAnsi()); terminal.flush(); } private void printInfo(String message) { terminal.writer().println(CliStrings.messageInfo(message).toAnsi()); terminal.flush(); } private void 
printWarning(String message) { terminal.writer().println(CliStrings.messageWarning(message).toAnsi()); terminal.flush(); } private void closeTerminal() { try { terminal.close(); terminal = null; } catch (IOException e) { } } private LineReader createLineReader(Terminal terminal) { LineReader lineReader = LineReaderBuilder.builder() .terminal(terminal) .appName(CliStrings.CLI_NAME) .parser(new SqlMultiLineParser()) .completer(new SqlCompleter(sessionId, executor)) .build(); lineReader.option(LineReader.Option.DISABLE_EVENT_EXPANSION, true); lineReader.setVariable(LineReader.ERRORS, 1); lineReader.option(LineReader.Option.CASE_INSENSITIVE, true); if (Files.exists(historyFilePath) || CliUtils.createFile(historyFilePath)) { String msg = "Command history file path: " + historyFilePath; terminal.writer().println(msg); LOG.info(msg); lineReader.setVariable(LineReader.HISTORY_FILE, historyFilePath); } else { String msg = "Unable to create history file: " + historyFilePath; terminal.writer().println(msg); LOG.warn(msg); } return lineReader; } }
class CliClient implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(CliClient.class); public static final Supplier<Terminal> DEFAULT_TERMINAL_FACTORY = TerminalUtils::createDefaultTerminal; private final Executor executor; private final String sessionId; private final Path historyFilePath; private final String prompt; private final @Nullable MaskingCallback inputTransformer; private final Supplier<Terminal> terminalFactory; private Terminal terminal; private boolean isRunning; private boolean isStatementSetMode; private List<ModifyOperation> statementSetOperations; private static final int PLAIN_TERMINAL_WIDTH = 80; private static final int PLAIN_TERMINAL_HEIGHT = 30; /** * Creates a CLI instance with a custom terminal. Make sure to close the CLI instance afterwards * using {@link */ @VisibleForTesting public CliClient( Supplier<Terminal> terminalFactory, String sessionId, Executor executor, Path historyFilePath, @Nullable MaskingCallback inputTransformer) { this.terminalFactory = terminalFactory; this.sessionId = sessionId; this.executor = executor; this.inputTransformer = inputTransformer; this.historyFilePath = historyFilePath; prompt = new AttributedStringBuilder() .style(AttributedStyle.DEFAULT.foreground(AttributedStyle.GREEN)) .append("Flink SQL") .style(AttributedStyle.DEFAULT) .append("> ") .toAnsi(); } /** * Creates a CLI instance with a prepared terminal. 
Make sure to close the CLI instance * afterwards using {@link */ public CliClient( Supplier<Terminal> terminalFactory, String sessionId, Executor executor, Path historyFilePath) { this(terminalFactory, sessionId, executor, historyFilePath, null); } public Terminal getTerminal() { return terminal; } public String getSessionId() { return this.sessionId; } public void clearTerminal() { if (isPlainTerminal()) { for (int i = 0; i < 200; i++) { terminal.writer().println(); } } else { terminal.puts(InfoCmp.Capability.clear_screen); } } public boolean isPlainTerminal() { return terminal.getWidth() == 0 && terminal.getHeight() == 0; } public int getWidth() { if (isPlainTerminal()) { return PLAIN_TERMINAL_WIDTH; } return terminal.getWidth(); } public int getHeight() { if (isPlainTerminal()) { return PLAIN_TERMINAL_HEIGHT; } return terminal.getHeight(); } public Executor getExecutor() { return executor; } /** Closes the CLI instance. */ public void close() { if (terminal != null) { closeTerminal(); } } /** Opens the interactive CLI shell. */ public void executeInInteractiveMode() { try { terminal = terminalFactory.get(); executeInteractive(); } finally { closeTerminal(); } } public void executeInNonInteractiveMode(String content) { try { terminal = terminalFactory.get(); executeFile(content, ExecutionMode.NON_INTERACTIVE_EXECUTION); } finally { closeTerminal(); } } public boolean executeInitialization(String content) { try { OutputStream outputStream = new ByteArrayOutputStream(256); terminal = TerminalUtils.createDumbTerminal(outputStream); boolean success = executeFile(content, ExecutionMode.INITIALIZATION); LOG.info(outputStream.toString()); return success; } finally { closeTerminal(); } } enum ExecutionMode { INTERACTIVE_EXECUTION, NON_INTERACTIVE_EXECUTION, INITIALIZATION } /** * Execute statement from the user input and prints status information and/or errors on the * terminal. 
*/ private void executeInteractive() { isRunning = true; LineReader lineReader = createLineReader(terminal); terminal.writer().println(); terminal.writer().flush(); terminal.writer().append(CliStrings.MESSAGE_WELCOME); while (isRunning) { terminal.writer().append("\n"); terminal.flush(); String line; try { line = lineReader.readLine(prompt, null, inputTransformer, null); } catch (UserInterruptException e) { continue; } catch (EndOfFileException | IOError e) { break; } catch (Throwable t) { throw new SqlClientException("Could not read from command line.", t); } if (line == null) { continue; } executeStatement(line, ExecutionMode.INTERACTIVE_EXECUTION); } } /** * Execute content from Sql file and prints status information and/or errors on the terminal. * * @param content SQL file content */ private boolean executeFile(String content, ExecutionMode mode) { terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EXECUTE_FILE).toAnsi()); for (String statement : CliStatementSplitter.splitContent(content)) { terminal.writer() .println(new AttributedString(String.format("%s%s", prompt, statement))); terminal.flush(); if (!executeStatement(statement, mode)) { return false; } } return true; } private void validate(Operation operation, ExecutionMode executionMode) { if (executionMode.equals(ExecutionMode.INITIALIZATION)) { if (!(operation instanceof SetOperation) && !(operation instanceof ResetOperation) && !(operation instanceof CreateOperation) && !(operation instanceof DropOperation) && !(operation instanceof UseOperation) && !(operation instanceof AlterOperation) && !(operation instanceof LoadModuleOperation) && !(operation instanceof UnloadModuleOperation) && !(operation instanceof AddJarOperation) && !(operation instanceof RemoveJarOperation)) { throw new SqlExecutionException( "Unsupported operation in sql init file: " + operation.asSummaryString()); } } else if (executionMode.equals(ExecutionMode.NON_INTERACTIVE_EXECUTION)) { ResultMode mode = 
executor.getSessionConfig(sessionId).get(EXECUTION_RESULT_MODE); if (operation instanceof QueryOperation && !mode.equals(TABLEAU)) { throw new SqlExecutionException( String.format( "In non-interactive mode, it only supports to use %s as value of %s when execute query. Please add 'SET %s=%s;' in the sql file.", TABLEAU, EXECUTION_RESULT_MODE.key(), EXECUTION_RESULT_MODE.key(), TABLEAU)); } } if (isStatementSetMode) { if (!(operation instanceof CatalogSinkModifyOperation || operation instanceof EndStatementSetOperation)) { throw new SqlExecutionException(MESSAGE_STATEMENT_SET_SQL_EXECUTION_ERROR); } } } private Optional<Operation> parseCommand(String stmt) { stmt = stmt.trim(); if (stmt.endsWith(";")) { stmt = stmt.substring(0, stmt.length() - 1).trim(); } if (stmt.trim().isEmpty()) { return Optional.empty(); } Operation operation = executor.parseStatement(sessionId, stmt); return Optional.of(operation); } private void callOperation(Operation operation, ExecutionMode mode) { validate(operation, mode); if (operation instanceof QuitOperation) { callQuit(); } else if (operation instanceof ClearOperation) { callClear(); } else if (operation instanceof HelpOperation) { callHelp(); } else if (operation instanceof SetOperation) { callSet((SetOperation) operation); } else if (operation instanceof ResetOperation) { callReset((ResetOperation) operation); } else if (operation instanceof CatalogSinkModifyOperation) { callInsert((CatalogSinkModifyOperation) operation); } else if (operation instanceof QueryOperation) { callSelect((QueryOperation) operation); } else if (operation instanceof ExplainOperation) { callExplain((ExplainOperation) operation); } else if (operation instanceof BeginStatementSetOperation) { callBeginStatementSet(); } else if (operation instanceof EndStatementSetOperation) { callEndStatementSet(); } else if (operation instanceof AddJarOperation) { callAddJar((AddJarOperation) operation); } else if (operation instanceof RemoveJarOperation) { 
callRemoveJar((RemoveJarOperation) operation); } else if (operation instanceof ShowJarsOperation) { callShowJars(); } else if (operation instanceof ShowCreateTableOperation) { callShowCreateTable((ShowCreateTableOperation) operation); } else { executeOperation(operation); } } private void callAddJar(AddJarOperation operation) { String jarPath = operation.getPath(); executor.addJar(sessionId, jarPath); printInfo(CliStrings.MESSAGE_ADD_JAR_STATEMENT); } private void callRemoveJar(RemoveJarOperation operation) { String jarPath = operation.getPath(); executor.removeJar(sessionId, jarPath); printInfo(CliStrings.MESSAGE_REMOVE_JAR_STATEMENT); } private void callShowJars() { List<String> jars = executor.listJars(sessionId); if (CollectionUtils.isEmpty(jars)) { terminal.writer().println("Empty set"); } else { jars.forEach(jar -> terminal.writer().println(jar)); } terminal.flush(); } private void callQuit() { printInfo(CliStrings.MESSAGE_QUIT); isRunning = false; } private void callClear() { clearTerminal(); } private void callReset(ResetOperation resetOperation) { if (!resetOperation.getKey().isPresent()) { executor.resetSessionProperties(sessionId); printInfo(CliStrings.MESSAGE_RESET); } else { String key = resetOperation.getKey().get(); executor.resetSessionProperty(sessionId, key); printInfo(MESSAGE_RESET_KEY); } } private void callSet(SetOperation setOperation) { if (setOperation.getKey().isPresent() && setOperation.getValue().isPresent()) { String key = setOperation.getKey().get().trim(); String value = setOperation.getValue().get().trim(); executor.setSessionProperty(sessionId, key, value); printInfo(MESSAGE_SET_KEY); } else { final Map<String, String> properties = executor.getSessionConfigMap(sessionId); if (properties.isEmpty()) { terminal.writer() .println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi()); } else { List<String> prettyEntries = new ArrayList<>(); for (String key : properties.keySet()) { prettyEntries.add( String.format( "'%s' = '%s'", 
EncodingUtils.escapeSingleQuotes(key), EncodingUtils.escapeSingleQuotes(properties.get(key)))); } prettyEntries.sort(String::compareTo); prettyEntries.forEach(entry -> terminal.writer().println(entry)); } terminal.flush(); } } private void callHelp() { terminal.writer().println(CliStrings.MESSAGE_HELP); terminal.flush(); } private void callSelect(QueryOperation operation) { final ResultDescriptor resultDesc = executor.executeQuery(sessionId, operation); if (resultDesc.isTableauMode()) { try (CliTableauResultView tableauResultView = new CliTableauResultView(terminal, executor, sessionId, resultDesc)) { tableauResultView.displayResults(); } } else { final CliResultView<?> view; if (resultDesc.isMaterialized()) { view = new CliTableResultView(this, resultDesc); } else { view = new CliChangelogResultView(this, resultDesc); } view.open(); printInfo(CliStrings.MESSAGE_RESULT_QUIT); } } private void callInsert(CatalogSinkModifyOperation operation) { if (isStatementSetMode) { statementSetOperations.add(operation); printInfo(CliStrings.MESSAGE_ADD_STATEMENT_TO_STATEMENT_SET); } else { callInserts(Collections.singletonList(operation)); } } private void callInserts(List<ModifyOperation> operations) { printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT); boolean sync = executor.getSessionConfig(sessionId).get(TABLE_DML_SYNC); if (sync) { printInfo(MESSAGE_WAIT_EXECUTE); } TableResult tableResult = executor.executeModifyOperations(sessionId, operations); checkState(tableResult.getJobClient().isPresent()); if (sync) { terminal.writer().println(CliStrings.messageInfo(MESSAGE_FINISH_STATEMENT).toAnsi()); } else { terminal.writer().println(CliStrings.messageInfo(MESSAGE_STATEMENT_SUBMITTED).toAnsi()); terminal.writer() .println( String.format( "Job ID: %s\n", tableResult.getJobClient().get().getJobID().toString())); } terminal.flush(); } public void callExplain(ExplainOperation operation) { printRawContent(operation); } public void callShowCreateTable(ShowCreateTableOperation 
operation) { printRawContent(operation); } public void printRawContent(Operation operation) { TableResult tableResult = executor.executeOperation(sessionId, operation); final String explanation = Objects.requireNonNull(tableResult.collect().next().getField(0)).toString(); terminal.writer().println(explanation); terminal.flush(); } private void callBeginStatementSet() { isStatementSetMode = true; statementSetOperations = new ArrayList<>(); printInfo(CliStrings.MESSAGE_BEGIN_STATEMENT_SET); } private void callEndStatementSet() { if (isStatementSetMode) { isStatementSetMode = false; if (!statementSetOperations.isEmpty()) { callInserts(statementSetOperations); } else { printInfo(CliStrings.MESSAGE_NO_STATEMENT_IN_STATEMENT_SET); } statementSetOperations = null; } else { throw new SqlExecutionException(MESSAGE_STATEMENT_SET_END_CALL_ERROR); } } private void executeOperation(Operation operation) { TableResult result = executor.executeOperation(sessionId, operation); if (TABLE_RESULT_OK == result) { printInfo(MESSAGE_EXECUTE_STATEMENT); } else { PrintUtils.printAsTableauForm( result.getResolvedSchema(), result.collect(), terminal.writer(), Integer.MAX_VALUE, "", false, false, CliUtils.getSessionTimeZone(executor.getSessionConfig(sessionId))); terminal.flush(); } } private void printExecutionException(Throwable t) { final String errorMessage = CliStrings.MESSAGE_SQL_EXECUTION_ERROR; LOG.warn(errorMessage, t); boolean isVerbose = executor.getSessionConfig(sessionId).get(SqlClientOptions.VERBOSE); terminal.writer().println(CliStrings.messageError(errorMessage, t, isVerbose).toAnsi()); terminal.flush(); } private void printInfo(String message) { terminal.writer().println(CliStrings.messageInfo(message).toAnsi()); terminal.flush(); } private void printWarning(String message) { terminal.writer().println(CliStrings.messageWarning(message).toAnsi()); terminal.flush(); } private void closeTerminal() { try { terminal.close(); terminal = null; } catch (IOException e) { } } private 
LineReader createLineReader(Terminal terminal) { LineReader lineReader = LineReaderBuilder.builder() .terminal(terminal) .appName(CliStrings.CLI_NAME) .parser(new SqlMultiLineParser()) .completer(new SqlCompleter(sessionId, executor)) .build(); lineReader.option(LineReader.Option.DISABLE_EVENT_EXPANSION, true); lineReader.setVariable(LineReader.ERRORS, 1); lineReader.option(LineReader.Option.CASE_INSENSITIVE, true); if (Files.exists(historyFilePath) || CliUtils.createFile(historyFilePath)) { String msg = "Command history file path: " + historyFilePath; terminal.writer().println(msg); LOG.info(msg); lineReader.setVariable(LineReader.HISTORY_FILE, historyFilePath); } else { String msg = "Unable to create history file: " + historyFilePath; terminal.writer().println(msg); LOG.warn(msg); } return lineReader; } }
I think this should be testing the json content rather than an exact JSON string. For some examples see: https://github.com/rest-assured/rest-assured/wiki/usage . I think it should be more like: .body("errorMessage", is("Resource Not Found")) .body("existingResourcesDetails.basePath",is("/")) etc
public void testJsonResourceNotFound() { RestAssured.given().accept(ContentType.JSON) .when().get("/not_found") .then() .statusCode(404) .body(Matchers.is("{\"errorMessage\":\"Resource Not Found\",\"existingResourcesDetails\":" + "[{\"basePath\":\"/\",\"calls\":[{\"fullPath\":\"/\",\"method\":\"GET\"}]}]}")); }
.body(Matchers.is("{\"errorMessage\":\"Resource Not Found\",\"existingResourcesDetails\":" +
public void testJsonResourceNotFound() { RestAssured.given().accept(ContentType.JSON) .when().get("/not_found") .then() .statusCode(404) .body("errorMessage", is("Resource Not Found")) .body("existingResourcesDetails[0].basePath", is("/")); }
class NotFoundExceptionMapperTestCase { @RegisterExtension static QuarkusDevModeTest test = new QuarkusDevModeTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(RootResource.class)); @Test public void testHtmlResourceNotFound() { RestAssured.when().get("/not_found") .then() .statusCode(404) .body(Matchers.containsString("<div class=\"component-name\"><h1>Resource Not Found</h1>" + "<h2>REST interface overview</h2></div><h2>/</h2>")); } @Test }
class NotFoundExceptionMapperTestCase { @RegisterExtension static QuarkusDevModeTest test = new QuarkusDevModeTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addClasses(RootResource.class)); @Test public void testHtmlResourceNotFound() { RestAssured.when().get("/not_found") .then() .statusCode(404) .body(Matchers.containsString("<div class=\"component-name\"><h1>Resource Not Found</h1>")); } @Test }
There is also a case to be made for using `getEnvVarOrProperty` here, but let's be cautious and only broaden the scope a little instead of all the way
public Result get() { StartupLogCompressor compressor = new StartupLogCompressor("Checking Docker Environment", Optional.empty(), null, (s) -> s.getName().startsWith("ducttape")); try { Class<?> dockerClientFactoryClass = Thread.currentThread().getContextClassLoader() .loadClass("org.testcontainers.DockerClientFactory"); Object dockerClientFactoryInstance = dockerClientFactoryClass.getMethod("instance").invoke(null); Class<?> configurationClass = Thread.currentThread().getContextClassLoader() .loadClass("org.testcontainers.utility.TestcontainersConfiguration"); Object configurationInstance = configurationClass.getMethod("getInstance").invoke(null); String oldReusePropertyValue = (String) configurationClass .getMethod("getEnvVarOrUserProperty", String.class, String.class) .invoke(configurationInstance, "testcontainers.reuse.enable", "false"); Method updateUserConfigMethod = configurationClass.getMethod("updateUserConfig", String.class, String.class); updateUserConfigMethod.invoke(configurationInstance, "testcontainers.reuse.enable", "true"); boolean isAvailable = (boolean) dockerClientFactoryClass.getMethod("isDockerAvailable") .invoke(dockerClientFactoryInstance); if (!isAvailable) { compressor.closeAndDumpCaptured(); } updateUserConfigMethod.invoke(configurationInstance, "testcontainers.reuse.enable", oldReusePropertyValue); return isAvailable ? Result.AVAILABLE : Result.UNAVAILABLE; } catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { if (!silent) { compressor.closeAndDumpCaptured(); LOGGER.debug("Unable to use Testcontainers to determine if Docker is working", e); } return Result.UNKNOWN; } finally { compressor.close(); } }
.getMethod("getEnvVarOrUserProperty", String.class, String.class)
public Result get() { StartupLogCompressor compressor = new StartupLogCompressor("Checking Docker Environment", Optional.empty(), null, (s) -> s.getName().startsWith("ducttape")); try { Class<?> dockerClientFactoryClass = Thread.currentThread().getContextClassLoader() .loadClass("org.testcontainers.DockerClientFactory"); Object dockerClientFactoryInstance = dockerClientFactoryClass.getMethod("instance").invoke(null); Class<?> configurationClass = Thread.currentThread().getContextClassLoader() .loadClass("org.testcontainers.utility.TestcontainersConfiguration"); Object configurationInstance = configurationClass.getMethod("getInstance").invoke(null); String oldReusePropertyValue = (String) configurationClass .getMethod("getEnvVarOrUserProperty", String.class, String.class) .invoke(configurationInstance, "testcontainers.reuse.enable", "false"); Method updateUserConfigMethod = configurationClass.getMethod("updateUserConfig", String.class, String.class); updateUserConfigMethod.invoke(configurationInstance, "testcontainers.reuse.enable", "true"); boolean isAvailable = (boolean) dockerClientFactoryClass.getMethod("isDockerAvailable") .invoke(dockerClientFactoryInstance); if (!isAvailable) { compressor.closeAndDumpCaptured(); } updateUserConfigMethod.invoke(configurationInstance, "testcontainers.reuse.enable", oldReusePropertyValue); return isAvailable ? Result.AVAILABLE : Result.UNAVAILABLE; } catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { if (!silent) { compressor.closeAndDumpCaptured(); LOGGER.debug("Unable to use Testcontainers to determine if Docker is working", e); } return Result.UNKNOWN; } finally { compressor.close(); } }
class TestContainersStrategy implements Strategy { private final boolean silent; private TestContainersStrategy(boolean silent) { this.silent = silent; } @Override }
class TestContainersStrategy implements Strategy { private final boolean silent; private TestContainersStrategy(boolean silent) { this.silent = silent; } @Override }
Hm based on that I think what you had before (hard-coding "0.11.3") would be preferable. That way the test is deterministic. Let's break it out into a constant `LOCALSTACK_VERSION` or something though. Sorry for the churn :grimacing:
private static void setupLocalstack() throws Exception { System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, "true"); System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, "true"); now = Instant.ofEpochMilli(Long.divideUnsigned(Instant.now().getMillis(), 1000)); localstackContainer = new LocalStackContainer("0.11.3") .withServices(LocalStackContainer.Service.KINESIS) .withEnv("USE_SSL", "true") .withStartupAttempts(3); localstackContainer.start(); options.setAwsServiceEndpoint( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getServiceEndpoint() .replace("http", "https")); options.setAwsKinesisRegion( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getSigningRegion()); options.setAwsAccessKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSAccessKeyId()); options.setAwsSecretKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSSecretKey()); options.setNumberOfRecords(1000); options.setNumberOfShards(1); options.setAwsKinesisStream("beam_kinesis_test"); options.setAwsVerifyCertificate(false); createStream(options.getAwsKinesisStream()); }
new LocalStackContainer("0.11.3")
private static void setupLocalstack() { now = Instant.ofEpochMilli(Long.divideUnsigned(now.getMillis(), 1000L)); System.setProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY, "true"); System.setProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY, "true"); localstackContainer = new LocalStackContainer(LOCALSTACK_VERSION) .withServices(LocalStackContainer.Service.KINESIS) .withEnv("USE_SSL", "true") .withStartupAttempts(3); localstackContainer.start(); options.setAwsServiceEndpoint( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getServiceEndpoint() .replace("http", "https")); options.setAwsKinesisRegion( localstackContainer .getEndpointConfiguration(LocalStackContainer.Service.KINESIS) .getSigningRegion()); options.setAwsAccessKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSAccessKeyId()); options.setAwsSecretKey( localstackContainer.getDefaultCredentialsProvider().getCredentials().getAWSSecretKey()); options.setNumberOfRecords(1000); options.setNumberOfShards(1); options.setAwsKinesisStream("beam_kinesis_test"); options.setAwsVerifyCertificate(false); }
class KinesisIOIT implements Serializable { @Rule public TestPipeline pipelineWrite = TestPipeline.create(); @Rule public TestPipeline pipelineRead = TestPipeline.create(); private static LocalStackContainer localstackContainer; private static KinesisTestOptions options; private static Instant now = Instant.now(); @BeforeClass public static void setup() throws Exception { PipelineOptionsFactory.register(KinesisTestOptions.class); options = TestPipeline.testingPipelineOptions().as(KinesisTestOptions.class); if (doUseLocalstack()) { setupLocalstack(); } } @AfterClass public static void teardown() { if (doUseLocalstack()) { System.clearProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY); System.clearProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY); localstackContainer.stop(); } } /** Test which write and then read data for a Kinesis stream. */ @Test public void testWriteThenRead() { runWrite(); runRead(); } /** Write test dataset into Kinesis stream. */ private void runWrite() { pipelineWrite .apply("Generate Sequence", GenerateSequence.from(0).to(options.getNumberOfRecords())) .apply("Prepare TestRows", ParDo.of(new TestRow.DeterministicallyConstructTestRowFn())) .apply("Prepare Kinesis input records", ParDo.of(new ConvertToBytes())) .apply( "Write to Kinesis", KinesisIO.write() .withStreamName(options.getAwsKinesisStream()) .withPartitioner(new RandomPartitioner()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate())); pipelineWrite.run().waitUntilFinish(); } /** Read test dataset from Kinesis stream. 
*/ private void runRead() { PCollection<KinesisRecord> output = pipelineRead.apply( KinesisIO.read() .withStreamName(options.getAwsKinesisStream()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate()) .withMaxNumRecords(options.getNumberOfRecords()) .withMaxReadTime(Duration.standardMinutes(10L)) .withInitialPositionInStream(InitialPositionInStream.AT_TIMESTAMP) .withInitialTimestampInStream(now) .withRequestRecordsLimit(1000)); PAssert.thatSingleton(output.apply("Count All", Count.globally())) .isEqualTo((long) options.getNumberOfRecords()); PCollection<String> consolidatedHashcode = output .apply(ParDo.of(new ExtractDataValues())) .apply("Hash row contents", Combine.globally(new HashingFn()).withoutDefaults()); PAssert.that(consolidatedHashcode) .containsInAnyOrder(TestRow.getExpectedHashForRowCount(options.getNumberOfRecords())); pipelineRead.run().waitUntilFinish(); } /** Necessary setup for localstack environment. */ private static void createStream(String streamName) throws Exception { AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard(); clientBuilder.setCredentials(localstackContainer.getDefaultCredentialsProvider()); clientBuilder.setEndpointConfiguration( localstackContainer.getEndpointConfiguration(LocalStackContainer.Service.KINESIS)); AmazonKinesis client = clientBuilder.build(); client.createStream(streamName, 1); int repeats = 10; for (int i = 0; i <= repeats; ++i) { String streamStatus = client.describeStream(streamName).getStreamDescription().getStreamStatus(); if ("ACTIVE".equals(streamStatus)) { break; } if (i == repeats) { throw new RuntimeException("Unable to initialize stream"); } Thread.sleep(1000L); } } /** Check whether pipeline options were provided. If not, use localstack container. 
*/ private static boolean doUseLocalstack() { return "aws-access-key".equals(options.getAwsAccessKey()) && "aws-secret-key".equals(options.getAwsSecretKey()) && "aws-kinesis-stream".equals(options.getAwsKinesisStream()) && "aws-kinesis-region".equals(options.getAwsKinesisRegion()) && options.getNumberOfShards() == 2 && options.getNumberOfRecords() == 1000 && options.getAwsServiceEndpoint() == null && options.getAwsVerifyCertificate(); } /** Produces test rows. */ private static class ConvertToBytes extends DoFn<TestRow, byte[]> { @ProcessElement public void processElement(ProcessContext c) { c.output(String.valueOf(c.element().name()).getBytes(StandardCharsets.UTF_8)); } } /** Read rows from Table. */ private static class ExtractDataValues extends DoFn<KinesisRecord, String> { @ProcessElement public void processElement(ProcessContext c) { c.output(new String(c.element().getDataAsBytes(), StandardCharsets.UTF_8)); } } private static final class RandomPartitioner implements KinesisPartitioner { @Override public String getPartitionKey(byte[] value) { Random rand = new Random(); int n = rand.nextInt(options.getNumberOfShards()) + 1; return String.valueOf(n); } @Override public String getExplicitHashKey(byte[] value) { return null; } } }
class KinesisIOIT implements Serializable { private static final String LOCALSTACK_VERSION = "0.11.3"; @Rule public TestPipeline pipelineWrite = TestPipeline.create(); @Rule public TestPipeline pipelineRead = TestPipeline.create(); private static KinesisTestOptions options; private static AmazonKinesis kinesisClient; private static LocalStackContainer localstackContainer; private static Instant now = Instant.now(); @BeforeClass public static void setup() throws Exception { PipelineOptionsFactory.register(KinesisTestOptions.class); options = TestPipeline.testingPipelineOptions().as(KinesisTestOptions.class); if (options.getUseLocalstack()) { setupLocalstack(); kinesisClient = createKinesisClient(); createStream(options.getAwsKinesisStream()); } } @AfterClass public static void teardown() { if (options.getUseLocalstack()) { kinesisClient.deleteStream(options.getAwsKinesisStream()); System.clearProperty(SDKGlobalConfiguration.DISABLE_CERT_CHECKING_SYSTEM_PROPERTY); System.clearProperty(SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY); localstackContainer.stop(); } } /** Test which write and then read data for a Kinesis stream. */ @Test public void testWriteThenRead() { runWrite(); runRead(); } /** Write test dataset into Kinesis stream. */ private void runWrite() { pipelineWrite .apply("Generate Sequence", GenerateSequence.from(0).to(options.getNumberOfRecords())) .apply("Prepare TestRows", ParDo.of(new TestRow.DeterministicallyConstructTestRowFn())) .apply("Prepare Kinesis input records", ParDo.of(new ConvertToBytes())) .apply( "Write to Kinesis", KinesisIO.write() .withStreamName(options.getAwsKinesisStream()) .withPartitioner(new RandomPartitioner()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate())); pipelineWrite.run().waitUntilFinish(); } /** Read test dataset from Kinesis stream. 
*/ private void runRead() { PCollection<KinesisRecord> output = pipelineRead.apply( KinesisIO.read() .withStreamName(options.getAwsKinesisStream()) .withAWSClientsProvider( options.getAwsAccessKey(), options.getAwsSecretKey(), Regions.fromName(options.getAwsKinesisRegion()), options.getAwsServiceEndpoint(), options.getAwsVerifyCertificate()) .withMaxNumRecords(options.getNumberOfRecords()) .withMaxReadTime(Duration.standardMinutes(10L)) .withInitialPositionInStream(InitialPositionInStream.AT_TIMESTAMP) .withInitialTimestampInStream(now) .withRequestRecordsLimit(1000)); PAssert.thatSingleton(output.apply("Count All", Count.globally())) .isEqualTo((long) options.getNumberOfRecords()); PCollection<String> consolidatedHashcode = output .apply(ParDo.of(new ExtractDataValues())) .apply("Hash row contents", Combine.globally(new HashingFn()).withoutDefaults()); PAssert.that(consolidatedHashcode) .containsInAnyOrder(TestRow.getExpectedHashForRowCount(options.getNumberOfRecords())); pipelineRead.run().waitUntilFinish(); } /** Necessary setup for localstack environment. 
*/ private static AmazonKinesis createKinesisClient() { AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard(); AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider( new BasicAWSCredentials(options.getAwsAccessKey(), options.getAwsSecretKey())); clientBuilder.setCredentials(credentialsProvider); if (options.getAwsServiceEndpoint() != null) { AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration( options.getAwsServiceEndpoint(), options.getAwsKinesisRegion()); clientBuilder.setEndpointConfiguration(endpointConfiguration); } else { clientBuilder.setRegion(options.getAwsKinesisRegion()); } return clientBuilder.build(); } private static void createStream(String streamName) throws Exception { kinesisClient.createStream(streamName, 1); int repeats = 10; for (int i = 0; i <= repeats; ++i) { String streamStatus = kinesisClient.describeStream(streamName).getStreamDescription().getStreamStatus(); if ("ACTIVE".equals(streamStatus)) { break; } if (i == repeats) { throw new RuntimeException("Unable to initialize stream"); } Thread.sleep(1000L); } } /** Produces test rows. */ private static class ConvertToBytes extends DoFn<TestRow, byte[]> { @ProcessElement public void processElement(ProcessContext c) { c.output(String.valueOf(c.element().name()).getBytes(StandardCharsets.UTF_8)); } } /** Read rows from Table. */ private static class ExtractDataValues extends DoFn<KinesisRecord, String> { @ProcessElement public void processElement(ProcessContext c) { c.output(new String(c.element().getDataAsBytes(), StandardCharsets.UTF_8)); } } private static final class RandomPartitioner implements KinesisPartitioner { @Override public String getPartitionKey(byte[] value) { Random rand = new Random(); int n = rand.nextInt(options.getNumberOfShards()) + 1; return String.valueOf(n); } @Override public String getExplicitHashKey(byte[] value) { return null; } } }
I don't think we want this, doesn't this essentially remove batching?
public void processElement(ProcessContext c, BoundedWindow window) throws Exception { for (Future<?> f = outstandingWrites.peek(); f != null && f.isDone(); f = outstandingWrites.peek()) { outstandingWrites.remove().get(); } checkForFailures(); KV<ByteString, Iterable<Mutation>> record = c.element(); Instant writeStart = Instant.now(); pendingThrottlingMsecs = 0; CompletableFuture<Void> f = bigtableWriter .writeRecord(record) .handle(handleMutationException(record, window, writeStart)); outstandingWrites.add(f); if (pendingThrottlingMsecs > 0) { throttlingMsecs.inc(pendingThrottlingMsecs); } ++recordsWritten; seenWindows.compute(window, (key, count) -> (count != null ? count : 0) + 1); }
for (Future<?> f = outstandingWrites.peek();
public void processElement(ProcessContext c, BoundedWindow window) throws Exception { drainCompletedElementFutures(); checkForFailures(); KV<ByteString, Iterable<Mutation>> record = c.element(); Instant writeStart = Instant.now(); pendingThrottlingMsecs = 0; CompletableFuture<Void> f = bigtableWriter .writeRecord(record) .handle(handleMutationException(record, window, writeStart)); outstandingWrites.add(f); if (pendingThrottlingMsecs > 0) { throttlingMsecs.inc(pendingThrottlingMsecs); } ++recordsWritten; seenWindows.compute(window, (key, count) -> (count != null ? count : 0) + 1); }
class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, BigtableWriteResult> { private final BigtableServiceFactory factory; private final BigtableServiceFactory.ConfigId id; private final Coder<KV<ByteString, Iterable<Mutation>>> inputCoder; private final BadRecordRouter badRecordRouter; private final Counter throttlingMsecs = Metrics.counter(Metrics.THROTTLE_TIME_NAMESPACE, Metrics.THROTTLE_TIME_COUNTER_NAME); private final int throttleReportThresMsecs; private transient Set<KV<BigtableWriteException, BoundedWindow>> badRecords = null; private transient long pendingThrottlingMsecs; private transient boolean reportedLineage; @Nullable private BigtableServiceEntry serviceEntry; private transient Queue<Future<?>> outstandingWrites; BigtableWriterFn( BigtableServiceFactory factory, BigtableConfig bigtableConfig, BigtableWriteOptions writeOptions, Coder<KV<ByteString, Iterable<Mutation>>> inputCoder, BadRecordRouter badRecordRouter) { this.factory = factory; this.config = bigtableConfig; this.writeOptions = writeOptions; this.inputCoder = inputCoder; this.badRecordRouter = badRecordRouter; this.failures = new ConcurrentLinkedQueue<>(); this.id = factory.newId(); throttleReportThresMsecs = firstNonNull(writeOptions.getThrottlingReportTargetMs(), 180_000); LOG.debug("Created Bigtable Write Fn with writeOptions {} ", writeOptions); } @StartBundle public void startBundle(StartBundleContext c) throws IOException { recordsWritten = 0; this.seenWindows = Maps.newHashMapWithExpectedSize(1); if (serviceEntry == null) { serviceEntry = factory.getServiceForWriting(id, config, writeOptions, c.getPipelineOptions()); } if (bigtableWriter == null) { bigtableWriter = serviceEntry.getService().openForWriting(writeOptions); } badRecords = new HashSet<>(); outstandingWrites = new LinkedBlockingDeque<>(); } @ProcessElement private BiFunction<MutateRowResponse, Throwable, Void> handleMutationException( KV<ByteString, Iterable<Mutation>> record, BoundedWindow window, 
Instant writeStart) { return (MutateRowResponse result, Throwable exception) -> { if (exception != null) { if (isDataException(exception)) { retryIndividualRecord(record, window); } else { boolean isResourceException = false; if (exception instanceof StatusRuntimeException) { StatusRuntimeException se = (StatusRuntimeException) exception; if (io.grpc.Status.DEADLINE_EXCEEDED.equals(se.getStatus()) || io.grpc.Status.RESOURCE_EXHAUSTED.equals(se.getStatus())) { isResourceException = true; } } else if (exception instanceof DeadlineExceededException || exception instanceof ResourceExhaustedException) { isResourceException = true; } if (isResourceException) { pendingThrottlingMsecs = new Duration(writeStart, Instant.now()).getMillis(); } failures.add(new BigtableWriteException(record, exception)); } } else { if (throttleReportThresMsecs > 0) { long excessTime = new Duration(writeStart, Instant.now()).getMillis() - throttleReportThresMsecs; if (excessTime > 0) { pendingThrottlingMsecs = excessTime; } } } return null; }; } private void retryIndividualRecord( KV<ByteString, Iterable<Mutation>> record, BoundedWindow window) { try { bigtableWriter.writeSingleRecord(record); } catch (ApiException e) { if (isDataException(e)) { badRecords.add(KV.of(new BigtableWriteException(record, e), window)); } else { failures.add(new BigtableWriteException(record, e)); } } } private static boolean isDataException(Throwable e) { if (e instanceof ApiException && !((ApiException) e).isRetryable()) { return e instanceof NotFoundException || e instanceof InvalidArgumentException; } return false; } @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { List<Throwable> elementErrors = new ArrayList<>(); if (bigtableWriter != null) { Instant closeStart = Instant.now(); try { bigtableWriter.close(); } catch (IOException e) { if (!(e.getCause() instanceof BatchingException)) { throttlingMsecs.inc(new Duration(closeStart, Instant.now()).getMillis()); throw e; } } for 
(Future<?> f = outstandingWrites.poll(); f != null; f = outstandingWrites.poll()) { try { f.get(1, TimeUnit.MINUTES); } catch (ExecutionException e) { elementErrors.add(e.getCause()); } catch (TimeoutException e) { throw new IllegalStateException( "Unexpected timeout waiting for element future to resolve after the writer was closed", e); } } if (throttleReportThresMsecs > 0) { long excessTime = new Duration(closeStart, Instant.now()).getMillis() - throttleReportThresMsecs; if (excessTime > 0) { throttlingMsecs.inc(excessTime); } } if (!reportedLineage) { bigtableWriter.reportLineage(); reportedLineage = true; } bigtableWriter = null; } for (KV<BigtableWriteException, BoundedWindow> badRecord : badRecords) { try { badRecordRouter.route( c, badRecord.getKey().getRecord(), inputCoder, (Exception) badRecord.getKey().getCause(), "Failed to write malformed mutation to Bigtable", badRecord.getValue()); } catch (Exception e) { failures.add(badRecord.getKey()); } } checkForFailures(); if (!elementErrors.isEmpty() && failures.isEmpty()) { StringBuilder sb = new StringBuilder().append("Unexpected element failures:\n"); for (Throwable elementError : elementErrors) { sb.append(elementError.getMessage()); if (elementError.getCause() != null) { sb.append(": ").append(elementError.getCause().getMessage()); } sb.append("\n"); } throw new IllegalStateException(sb.toString()); } LOG.debug("Wrote {} records", recordsWritten); for (Map.Entry<BoundedWindow, Long> entry : seenWindows.entrySet()) { c.output( BigtableWriteResult.create(entry.getValue()), entry.getKey().maxTimestamp(), entry.getKey()); } } @Teardown public void tearDown() throws IOException { try { if (bigtableWriter != null) { bigtableWriter.close(); bigtableWriter = null; } } finally { if (serviceEntry != null) { serviceEntry.close(); serviceEntry = null; } } } @Override public void populateDisplayData(DisplayData.Builder builder) { config.populateDisplayData(builder); } private final BigtableConfig config; private final 
BigtableWriteOptions writeOptions; private BigtableService.Writer bigtableWriter; private long recordsWritten; private final ConcurrentLinkedQueue<BigtableWriteException> failures; private Map<BoundedWindow, Long> seenWindows; /** If any write has asynchronously failed, fail the bundle with a useful error. */ private void checkForFailures() throws IOException { if (failures.isEmpty()) { return; } StringBuilder logEntry = new StringBuilder(); int i = 0; List<BigtableWriteException> suppressed = Lists.newArrayList(); for (; i < 10 && !failures.isEmpty(); ++i) { BigtableWriteException exc = failures.remove(); logEntry.append("\n").append(exc.getMessage()); if (exc.getCause() != null) { logEntry.append(": ").append(exc.getCause().getMessage()); } suppressed.add(exc); } String message = String.format( "At least %d errors occurred writing to Bigtable. First %d errors: %s", i + failures.size(), i, logEntry.toString()); LOG.error(message); IOException exception = new IOException(message); for (BigtableWriteException e : suppressed) { exception.addSuppressed(e); } throw exception; } }
class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, BigtableWriteResult> { private final BigtableServiceFactory factory; private final BigtableServiceFactory.ConfigId id; private final Coder<KV<ByteString, Iterable<Mutation>>> inputCoder; private final BadRecordRouter badRecordRouter; private final Counter throttlingMsecs = Metrics.counter(Metrics.THROTTLE_TIME_NAMESPACE, Metrics.THROTTLE_TIME_COUNTER_NAME); private final int throttleReportThresMsecs; private transient ConcurrentLinkedQueue<KV<BigtableWriteException, BoundedWindow>> badRecords = null; private transient long pendingThrottlingMsecs; private transient boolean reportedLineage; @Nullable private BigtableServiceEntry serviceEntry; private transient Queue<CompletableFuture<?>> outstandingWrites; BigtableWriterFn( BigtableServiceFactory factory, BigtableConfig bigtableConfig, BigtableWriteOptions writeOptions, Coder<KV<ByteString, Iterable<Mutation>>> inputCoder, BadRecordRouter badRecordRouter) { this.factory = factory; this.config = bigtableConfig; this.writeOptions = writeOptions; this.inputCoder = inputCoder; this.badRecordRouter = badRecordRouter; this.failures = new ConcurrentLinkedQueue<>(); this.id = factory.newId(); throttleReportThresMsecs = firstNonNull(writeOptions.getThrottlingReportTargetMs(), 180_000); LOG.debug("Created Bigtable Write Fn with writeOptions {} ", writeOptions); } @StartBundle public void startBundle(StartBundleContext c) throws IOException { recordsWritten = 0; this.seenWindows = Maps.newHashMapWithExpectedSize(1); if (serviceEntry == null) { serviceEntry = factory.getServiceForWriting(id, config, writeOptions, c.getPipelineOptions()); } if (bigtableWriter == null) { bigtableWriter = serviceEntry.getService().openForWriting(writeOptions); } badRecords = new ConcurrentLinkedQueue<>(); outstandingWrites = new ArrayDeque<>(); } @ProcessElement private void drainCompletedElementFutures() throws ExecutionException, InterruptedException { for (Future<?> f = 
outstandingWrites.peek(); f != null && f.isDone(); f = outstandingWrites.peek()) { outstandingWrites.remove().get(); } } private BiFunction<MutateRowResponse, Throwable, Void> handleMutationException( KV<ByteString, Iterable<Mutation>> record, BoundedWindow window, Instant writeStart) { return (MutateRowResponse result, Throwable exception) -> { if (exception != null) { if (isDataException(exception)) { retryIndividualRecord(record, window); } else { boolean isResourceException = false; if (exception instanceof StatusRuntimeException) { StatusRuntimeException se = (StatusRuntimeException) exception; if (io.grpc.Status.DEADLINE_EXCEEDED.equals(se.getStatus()) || io.grpc.Status.RESOURCE_EXHAUSTED.equals(se.getStatus())) { isResourceException = true; } } else if (exception instanceof DeadlineExceededException || exception instanceof ResourceExhaustedException) { isResourceException = true; } if (isResourceException) { pendingThrottlingMsecs = new Duration(writeStart, Instant.now()).getMillis(); } failures.add(new BigtableWriteException(record, exception)); } } else { if (throttleReportThresMsecs > 0) { long excessTime = new Duration(writeStart, Instant.now()).getMillis() - throttleReportThresMsecs; if (excessTime > 0) { pendingThrottlingMsecs = excessTime; } } } return null; }; } private void retryIndividualRecord( KV<ByteString, Iterable<Mutation>> record, BoundedWindow window) { try { bigtableWriter.writeSingleRecord(record); } catch (Throwable e) { if (isDataException(e)) { badRecords.add(KV.of(new BigtableWriteException(record, e), window)); } else { failures.add(new BigtableWriteException(record, e)); } } } private static boolean isDataException(Throwable e) { if (e instanceof ApiException && !((ApiException) e).isRetryable()) { return e instanceof NotFoundException || e instanceof InvalidArgumentException; } return false; } @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { if (bigtableWriter != null) { Instant closeStart = 
Instant.now(); try { bigtableWriter.close(); } catch (IOException e) { if (!(e.getCause() instanceof BatchingException)) { throttlingMsecs.inc(new Duration(closeStart, Instant.now()).getMillis()); throw e; } } try { CompletableFuture.allOf(outstandingWrites.toArray(new CompletableFuture<?>[0])) .get(1, TimeUnit.MINUTES); } catch (TimeoutException e) { throw new IllegalStateException( "Unexpected timeout waiting for element future to resolve after the writer was closed", e); } if (throttleReportThresMsecs > 0) { long excessTime = new Duration(closeStart, Instant.now()).getMillis() - throttleReportThresMsecs; if (excessTime > 0) { throttlingMsecs.inc(excessTime); } } if (!reportedLineage) { bigtableWriter.reportLineage(); reportedLineage = true; } bigtableWriter = null; } for (KV<BigtableWriteException, BoundedWindow> badRecord : badRecords) { try { badRecordRouter.route( c, badRecord.getKey().getRecord(), inputCoder, (Exception) badRecord.getKey().getCause(), "Failed to write malformed mutation to Bigtable", badRecord.getValue()); } catch (Exception e) { failures.add(badRecord.getKey()); } } checkForFailures(); LOG.debug("Wrote {} records", recordsWritten); for (Map.Entry<BoundedWindow, Long> entry : seenWindows.entrySet()) { c.output( BigtableWriteResult.create(entry.getValue()), entry.getKey().maxTimestamp(), entry.getKey()); } } @Teardown public void tearDown() throws IOException { try { if (bigtableWriter != null) { bigtableWriter.close(); bigtableWriter = null; } } finally { if (serviceEntry != null) { serviceEntry.close(); serviceEntry = null; } } } @Override public void populateDisplayData(DisplayData.Builder builder) { config.populateDisplayData(builder); } private final BigtableConfig config; private final BigtableWriteOptions writeOptions; private BigtableService.Writer bigtableWriter; private long recordsWritten; private final ConcurrentLinkedQueue<BigtableWriteException> failures; private Map<BoundedWindow, Long> seenWindows; /** If any write has 
asynchronously failed, fail the bundle with a useful error. */ private void checkForFailures() throws IOException { if (failures.isEmpty()) { return; } StringBuilder logEntry = new StringBuilder(); int i = 0; List<BigtableWriteException> suppressed = Lists.newArrayList(); for (; i < 10 && !failures.isEmpty(); ++i) { BigtableWriteException exc = failures.remove(); logEntry.append("\n").append(exc.getMessage()); if (exc.getCause() != null) { logEntry.append(": ").append(exc.getCause().getMessage()); } suppressed.add(exc); } String message = String.format( "At least %d errors occurred writing to Bigtable. First %d errors: %s", i + failures.size(), i, logEntry.toString()); LOG.error(message); IOException exception = new IOException(message); for (BigtableWriteException e : suppressed) { exception.addSuppressed(e); } throw exception; } }
Shouldn't we have separate checks for these? In case decode started and never finished, it may provide insights on gaps in the SDK or decode process or any internal errors, like we have for other events, where we see a null end time.
private void messageReceived(final ChannelHandlerContext context, final RntbdResponse response) { final Long transportRequestId = response.getTransportRequestId(); if (transportRequestId == null) { reportIssue(context, "response ignored because its transportRequestId is missing: {}", response); return; } final RntbdRequestRecord requestRecord = this.pendingRequests.get(transportRequestId); if (requestRecord == null) { logger.debug("response {} ignored because its requestRecord is missing: {}", transportRequestId, response); return; } if (response.getDecodeEndTime() != null && response.getDecodeStartTime() != null) { requestRecord.stage(RntbdRequestRecord.Stage.DECODE_STARTED, response.getDecodeStartTime()); requestRecord.stage(RntbdRequestRecord.Stage.DECODE_COMPLETED, response.getDecodeEndTime()); } requestRecord.responseLength(response.getMessageLength()); requestRecord.stage(RntbdRequestRecord.Stage.RECEIVED); final HttpResponseStatus status = response.getStatus(); final UUID activityId = response.getActivityId(); final int statusCode = status.code(); if ((HttpResponseStatus.OK.code() <= statusCode && statusCode < HttpResponseStatus.MULTIPLE_CHOICES.code()) || statusCode == HttpResponseStatus.NOT_MODIFIED.code()) { final StoreResponse storeResponse = response.toStoreResponse(this.contextFuture.getNow(null)); requestRecord.complete(storeResponse); } else { final CosmosException cause; final long lsn = response.getHeader(RntbdResponseHeader.LSN); final String partitionKeyRangeId = response.getHeader(RntbdResponseHeader.PartitionKeyRangeId); final CosmosError error = response.hasPayload() ? new CosmosError(RntbdObjectMapper.readTree(response)) : new CosmosError(Integer.toString(statusCode), status.reasonPhrase(), status.codeClass().name()); final Map<String, String> responseHeaders = response.getHeaders().asMap( this.rntbdContext().orElseThrow(IllegalStateException::new), activityId ); final String resourceAddress = requestRecord.args().physicalAddress() != null ? 
requestRecord.args().physicalAddress().toString() : null; switch (status.code()) { case StatusCodes.BADREQUEST: cause = new BadRequestException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.CONFLICT: cause = new ConflictException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.FORBIDDEN: cause = new ForbiddenException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.GONE: final int subStatusCode = Math.toIntExact(response.getHeader(RntbdResponseHeader.SubStatus)); switch (subStatusCode) { case SubStatusCodes.COMPLETING_SPLIT: cause = new PartitionKeyRangeIsSplittingException(error, lsn, partitionKeyRangeId, responseHeaders); break; case SubStatusCodes.COMPLETING_PARTITION_MIGRATION: cause = new PartitionIsMigratingException(error, lsn, partitionKeyRangeId, responseHeaders); break; case SubStatusCodes.NAME_CACHE_IS_STALE: cause = new InvalidPartitionException(error, lsn, partitionKeyRangeId, responseHeaders); break; case SubStatusCodes.PARTITION_KEY_RANGE_GONE: cause = new PartitionKeyRangeGoneException(error, lsn, partitionKeyRangeId, responseHeaders); break; default: GoneException goneExceptionFromService = new GoneException(error, lsn, partitionKeyRangeId, responseHeaders); goneExceptionFromService.setIsBasedOn410ResponseFromService(); cause = goneExceptionFromService; break; } break; case StatusCodes.INTERNAL_SERVER_ERROR: cause = new InternalServerErrorException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.LOCKED: cause = new LockedException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.METHOD_NOT_ALLOWED: cause = new MethodNotAllowedException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.NOTFOUND: cause = new NotFoundException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.PRECONDITION_FAILED: cause = new PreconditionFailedException(error, lsn, partitionKeyRangeId, 
responseHeaders); break; case StatusCodes.REQUEST_ENTITY_TOO_LARGE: cause = new RequestEntityTooLargeException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.REQUEST_TIMEOUT: Exception inner = new RequestTimeoutException(error, lsn, partitionKeyRangeId, responseHeaders); cause = new GoneException(resourceAddress, error, lsn, partitionKeyRangeId, responseHeaders, inner); break; case StatusCodes.RETRY_WITH: cause = new RetryWithException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.SERVICE_UNAVAILABLE: cause = new ServiceUnavailableException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.TOO_MANY_REQUESTS: cause = new RequestRateTooLargeException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.UNAUTHORIZED: cause = new UnauthorizedException(error, lsn, partitionKeyRangeId, responseHeaders); break; default: cause = BridgeInternal.createCosmosException(resourceAddress, status.code(), error, responseHeaders); break; } BridgeInternal.setResourceAddress(cause, resourceAddress); requestRecord.completeExceptionally(cause); } }
if (response.getDecodeEndTime() != null && response.getDecodeStartTime() != null) {
private void messageReceived(final ChannelHandlerContext context, final RntbdResponse response) { final Long transportRequestId = response.getTransportRequestId(); if (transportRequestId == null) { reportIssue(context, "response ignored because its transportRequestId is missing: {}", response); return; } final RntbdRequestRecord requestRecord = this.pendingRequests.get(transportRequestId); if (requestRecord == null) { logger.debug("response {} ignored because its requestRecord is missing: {}", transportRequestId, response); return; } requestRecord.stage(RntbdRequestRecord.Stage.DECODE_STARTED, response.getDecodeStartTime()); requestRecord.stage( RntbdRequestRecord.Stage.RECEIVED, response.getDecodeEndTime() != null ? response.getDecodeEndTime() : Instant.now()); requestRecord.responseLength(response.getMessageLength()); final HttpResponseStatus status = response.getStatus(); final UUID activityId = response.getActivityId(); final int statusCode = status.code(); if ((HttpResponseStatus.OK.code() <= statusCode && statusCode < HttpResponseStatus.MULTIPLE_CHOICES.code()) || statusCode == HttpResponseStatus.NOT_MODIFIED.code()) { final StoreResponse storeResponse = response.toStoreResponse(this.contextFuture.getNow(null)); requestRecord.complete(storeResponse); } else { final CosmosException cause; final long lsn = response.getHeader(RntbdResponseHeader.LSN); final String partitionKeyRangeId = response.getHeader(RntbdResponseHeader.PartitionKeyRangeId); final CosmosError error = response.hasPayload() ? new CosmosError(RntbdObjectMapper.readTree(response)) : new CosmosError(Integer.toString(statusCode), status.reasonPhrase(), status.codeClass().name()); final Map<String, String> responseHeaders = response.getHeaders().asMap( this.rntbdContext().orElseThrow(IllegalStateException::new), activityId ); final String resourceAddress = requestRecord.args().physicalAddress() != null ? 
requestRecord.args().physicalAddress().toString() : null; switch (status.code()) { case StatusCodes.BADREQUEST: cause = new BadRequestException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.CONFLICT: cause = new ConflictException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.FORBIDDEN: cause = new ForbiddenException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.GONE: final int subStatusCode = Math.toIntExact(response.getHeader(RntbdResponseHeader.SubStatus)); switch (subStatusCode) { case SubStatusCodes.COMPLETING_SPLIT: cause = new PartitionKeyRangeIsSplittingException(error, lsn, partitionKeyRangeId, responseHeaders); break; case SubStatusCodes.COMPLETING_PARTITION_MIGRATION: cause = new PartitionIsMigratingException(error, lsn, partitionKeyRangeId, responseHeaders); break; case SubStatusCodes.NAME_CACHE_IS_STALE: cause = new InvalidPartitionException(error, lsn, partitionKeyRangeId, responseHeaders); break; case SubStatusCodes.PARTITION_KEY_RANGE_GONE: cause = new PartitionKeyRangeGoneException(error, lsn, partitionKeyRangeId, responseHeaders); break; default: GoneException goneExceptionFromService = new GoneException(error, lsn, partitionKeyRangeId, responseHeaders); goneExceptionFromService.setIsBasedOn410ResponseFromService(); cause = goneExceptionFromService; break; } break; case StatusCodes.INTERNAL_SERVER_ERROR: cause = new InternalServerErrorException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.LOCKED: cause = new LockedException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.METHOD_NOT_ALLOWED: cause = new MethodNotAllowedException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.NOTFOUND: cause = new NotFoundException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.PRECONDITION_FAILED: cause = new PreconditionFailedException(error, lsn, partitionKeyRangeId, 
responseHeaders); break; case StatusCodes.REQUEST_ENTITY_TOO_LARGE: cause = new RequestEntityTooLargeException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.REQUEST_TIMEOUT: Exception inner = new RequestTimeoutException(error, lsn, partitionKeyRangeId, responseHeaders); cause = new GoneException(resourceAddress, error, lsn, partitionKeyRangeId, responseHeaders, inner); break; case StatusCodes.RETRY_WITH: cause = new RetryWithException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.SERVICE_UNAVAILABLE: cause = new ServiceUnavailableException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.TOO_MANY_REQUESTS: cause = new RequestRateTooLargeException(error, lsn, partitionKeyRangeId, responseHeaders); break; case StatusCodes.UNAUTHORIZED: cause = new UnauthorizedException(error, lsn, partitionKeyRangeId, responseHeaders); break; default: cause = BridgeInternal.createCosmosException(resourceAddress, status.code(), error, responseHeaders); break; } BridgeInternal.setResourceAddress(cause, resourceAddress); requestRecord.completeExceptionally(cause); } }
class RntbdRequestManager implements ChannelHandler, ChannelInboundHandler, ChannelOutboundHandler { private static final ClosedChannelException ON_CHANNEL_UNREGISTERED = ThrowableUtil.unknownStackTrace(new ClosedChannelException(), RntbdRequestManager.class, "channelUnregistered"); private static final ClosedChannelException ON_CLOSE = ThrowableUtil.unknownStackTrace(new ClosedChannelException(), RntbdRequestManager.class, "close"); private static final ClosedChannelException ON_DEREGISTER = ThrowableUtil.unknownStackTrace(new ClosedChannelException(), RntbdRequestManager.class, "deregister"); private static final EventExecutor requestExpirationExecutor = new DefaultEventExecutor(new RntbdThreadFactory( "request-expirator", true, Thread.NORM_PRIORITY)); private static final Logger logger = LoggerFactory.getLogger(RntbdRequestManager.class); private final CompletableFuture<RntbdContext> contextFuture = new CompletableFuture<>(); private final CompletableFuture<RntbdContextRequest> contextRequestFuture = new CompletableFuture<>(); private final ChannelHealthChecker healthChecker; private final int pendingRequestLimit; private final ConcurrentHashMap<Long, RntbdRequestRecord> pendingRequests; private final Timestamps timestamps = new Timestamps(); private boolean closingExceptionally = false; private CoalescingBufferQueue pendingWrites; public RntbdRequestManager(final ChannelHealthChecker healthChecker, final int pendingRequestLimit) { checkArgument(pendingRequestLimit > 0, "pendingRequestLimit: %s", pendingRequestLimit); checkNotNull(healthChecker, "healthChecker"); this.pendingRequests = new ConcurrentHashMap<>(pendingRequestLimit); this.pendingRequestLimit = pendingRequestLimit; this.healthChecker = healthChecker; } /** * Gets called after the {@link ChannelHandler} was added to the actual context and it's ready to handle events. 
* * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void handlerAdded(final ChannelHandlerContext context) { this.traceOperation(context, "handlerAdded"); } /** * Gets called after the {@link ChannelHandler} was removed from the actual context and it doesn't handle events * anymore. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void handlerRemoved(final ChannelHandlerContext context) { this.traceOperation(context, "handlerRemoved"); } /** * The {@link Channel} of the {@link ChannelHandlerContext} is now active * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelActive(final ChannelHandlerContext context) { this.traceOperation(context, "channelActive"); context.fireChannelActive(); } /** * The {@link Channel} of the {@link ChannelHandlerContext} was registered and has reached the end of its lifetime * <p> * This method will only be called after the channel is closed. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelInactive(final ChannelHandlerContext context) { this.traceOperation(context, "channelInactive"); context.fireChannelInactive(); } /** * The {@link Channel} of the {@link ChannelHandlerContext} has read a message from its peer. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs. * @param message The message read. 
*/ @Override public void channelRead(final ChannelHandlerContext context, final Object message) { this.traceOperation(context, "channelRead"); try { if (message.getClass() == RntbdResponse.class) { try { this.messageReceived(context, (RntbdResponse) message); } catch (CorruptedFrameException error) { this.exceptionCaught(context, error); } catch (Throwable throwable) { reportIssue(context, "{} ", message, throwable); this.exceptionCaught(context, throwable); } } else { final IllegalStateException error = new IllegalStateException( lenientFormat("expected message of %s, not %s: %s", RntbdResponse.class, message.getClass(), message)); reportIssue(context, "", error); this.exceptionCaught(context, error); } } finally { if (message instanceof ReferenceCounted) { boolean released = ((ReferenceCounted) message).release(); reportIssueUnless(released, context, "failed to release message: {}", message); } } } /** * The {@link Channel} of the {@link ChannelHandlerContext} has fully consumed the most-recent message read. * <p> * If {@link ChannelOption * {@link Channel} will be made until {@link ChannelHandlerContext * for outbound messages to be written. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelReadComplete(final ChannelHandlerContext context) { this.traceOperation(context, "channelReadComplete"); this.timestamps.channelReadCompleted(); context.fireChannelReadComplete(); } /** * Constructs a {@link CoalescingBufferQueue} for buffering encoded requests until we have an {@link RntbdRequest} * <p> * This method then calls {@link ChannelHandlerContext * {@link ChannelInboundHandler} in the {@link ChannelPipeline}. * <p> * Sub-classes may override this method to change behavior. 
* * @param context the {@link ChannelHandlerContext} for which the bind operation is made */ @Override public void channelRegistered(final ChannelHandlerContext context) { this.traceOperation(context, "channelRegistered"); reportIssueUnless(this.pendingWrites == null, context, "pendingWrites: {}", pendingWrites); this.pendingWrites = new CoalescingBufferQueue(context.channel()); context.fireChannelRegistered(); } /** * The {@link Channel} of the {@link ChannelHandlerContext} was unregistered from its {@link EventLoop} * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelUnregistered(final ChannelHandlerContext context) { this.traceOperation(context, "channelUnregistered"); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, ON_CHANNEL_UNREGISTERED); } else { logger.debug("{} channelUnregistered exceptionally", context); } context.fireChannelUnregistered(); } /** * Gets called once the writable state of a {@link Channel} changed. 
You can check the state with * {@link Channel * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelWritabilityChanged(final ChannelHandlerContext context) { this.traceOperation(context, "channelWritabilityChanged"); context.fireChannelWritabilityChanged(); } /** * Processes {@link ChannelHandlerContext * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs * @param cause Exception caught */ @Override @SuppressWarnings("deprecation") public void exceptionCaught(final ChannelHandlerContext context, final Throwable cause) { this.traceOperation(context, "exceptionCaught", cause); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, cause); logger.debug("{} closing due to:", context, cause); context.flush().close(); } } /** * Processes inbound events triggered by channel handlers in the {@link RntbdClientChannelHandler} pipeline * <p> * All but inbound request management events are ignored. 
* * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs * @param event An object representing a user event */ @Override public void userEventTriggered(final ChannelHandlerContext context, final Object event) { this.traceOperation(context, "userEventTriggered", event); try { if (event instanceof IdleStateEvent) { this.healthChecker.isHealthy(context.channel()).addListener((Future<Boolean> future) -> { final Throwable cause; if (future.isSuccess()) { if (future.get()) { return; } cause = UnhealthyChannelException.INSTANCE; } else { cause = future.cause(); } this.exceptionCaught(context, cause); }); return; } if (event instanceof RntbdContext) { this.contextFuture.complete((RntbdContext) event); this.removeContextNegotiatorAndFlushPendingWrites(context); return; } if (event instanceof RntbdContextException) { this.contextFuture.completeExceptionally((RntbdContextException) event); context.pipeline().flush().close(); return; } context.fireUserEventTriggered(event); } catch (Throwable error) { reportIssue(context, "{}: ", event, error); this.exceptionCaught(context, error); } } /** * Called once a bind operation is made. * * @param context the {@link ChannelHandlerContext} for which the bind operation is made * @param localAddress the {@link SocketAddress} to which it should bound * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void bind(final ChannelHandlerContext context, final SocketAddress localAddress, final ChannelPromise promise) { this.traceOperation(context, "bind", localAddress); context.bind(localAddress, promise); } /** * Called once a close operation is made. 
* * @param context the {@link ChannelHandlerContext} for which the close operation is made * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void close(final ChannelHandlerContext context, final ChannelPromise promise) { this.traceOperation(context, "close"); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, ON_CLOSE); } else { logger.debug("{} closed exceptionally", context); } final SslHandler sslHandler = context.pipeline().get(SslHandler.class); if (sslHandler != null) { try { sslHandler.closeOutbound(); } catch (Exception exception) { if (exception instanceof SSLException) { logger.debug( "SslException when attempting to close the outbound SSL connection: ", exception); } else { logger.warn( "Exception when attempting to close the outbound SSL connection: ", exception); throw exception; } } } context.close(promise); } /** * Called once a connect operation is made. * * @param context the {@link ChannelHandlerContext} for which the connect operation is made * @param remoteAddress the {@link SocketAddress} to which it should connect * @param localAddress the {@link SocketAddress} which is used as source on connect * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void connect( final ChannelHandlerContext context, final SocketAddress remoteAddress, final SocketAddress localAddress, final ChannelPromise promise ) { this.traceOperation(context, "connect", remoteAddress, localAddress); context.connect(remoteAddress, localAddress, promise); } /** * Called once a deregister operation is made from the current registered {@link EventLoop}. 
* * @param context the {@link ChannelHandlerContext} for which the deregister operation is made * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void deregister(final ChannelHandlerContext context, final ChannelPromise promise) { this.traceOperation(context, "deregister"); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, ON_DEREGISTER); } else { logger.debug("{} deregistered exceptionally", context); } context.deregister(promise); } /** * Called once a disconnect operation is made. * * @param context the {@link ChannelHandlerContext} for which the disconnect operation is made * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void disconnect(final ChannelHandlerContext context, final ChannelPromise promise) { this.traceOperation(context, "disconnect"); context.disconnect(promise); } /** * Called once a flush operation is made * <p> * The flush operation will try to flush out all previous written messages that are pending. * * @param context the {@link ChannelHandlerContext} for which the flush operation is made */ @Override public void flush(final ChannelHandlerContext context) { this.traceOperation(context, "flush"); context.flush(); } /** * Intercepts {@link ChannelHandlerContext * * @param context the {@link ChannelHandlerContext} for which the read operation is made */ @Override public void read(final ChannelHandlerContext context) { this.traceOperation(context, "read"); context.read(); } /** * Called once a write operation is made * <p> * The write operation will send messages through the {@link ChannelPipeline} which are then ready to be flushed * to the actual {@link Channel}. 
This will occur when {@link Channel * * @param context the {@link ChannelHandlerContext} for which the write operation is made * @param message the message to write * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void write(final ChannelHandlerContext context, final Object message, final ChannelPromise promise) { this.traceOperation(context, "write", message); if (message instanceof RntbdRequestRecord) { final RntbdRequestRecord record = (RntbdRequestRecord) message; this.timestamps.channelWriteAttempted(); record.setSendingRequestHasStarted(); context.write(this.addPendingRequestRecord(context, record), promise).addListener(completed -> { record.stage(RntbdRequestRecord.Stage.SENT); if (completed.isSuccess()) { this.timestamps.channelWriteCompleted(); } }); return; } if (message == RntbdHealthCheckRequest.MESSAGE) { context.write(RntbdHealthCheckRequest.MESSAGE, promise).addListener(completed -> { if (completed.isSuccess()) { this.timestamps.channelPingCompleted(); } }); return; } final IllegalStateException error = new IllegalStateException(lenientFormat("message of %s: %s", message.getClass(), message)); reportIssue(context, "", error); this.exceptionCaught(context, error); } int pendingRequestCount() { return this.pendingRequests.size(); } Optional<RntbdContext> rntbdContext() { return Optional.of(this.contextFuture.getNow(null)); } CompletableFuture<RntbdContextRequest> rntbdContextRequestFuture() { return this.contextRequestFuture; } boolean hasRequestedRntbdContext() { return this.contextRequestFuture.getNow(null) != null; } boolean hasRntbdContext() { return this.contextFuture.getNow(null) != null; } boolean isServiceable(final int demand) { reportIssueUnless(this.hasRequestedRntbdContext(), this, "Direct TCP context request was not issued"); final int limit = this.hasRntbdContext() ? 
this.pendingRequestLimit : Math.min(this.pendingRequestLimit, demand); return this.pendingRequests.size() < limit; } void pendWrite(final ByteBuf out, final ChannelPromise promise) { this.pendingWrites.add(out, promise); } Timestamps snapshotTimestamps() { return new Timestamps(this.timestamps); } private RntbdRequestRecord addPendingRequestRecord(final ChannelHandlerContext context, final RntbdRequestRecord record) { return this.pendingRequests.compute(record.transportRequestId(), (id, current) -> { reportIssueUnless(current == null, context, "id: {}, current: {}, request: {}", record); record.pendingRequestQueueSize(pendingRequests.size()); final Timeout pendingRequestTimeout = record.newTimeout(timeout -> { requestExpirationExecutor.execute(record::expire); }); record.whenComplete((response, error) -> { this.pendingRequests.remove(id); pendingRequestTimeout.cancel(); }); return record; }); } private void completeAllPendingRequestsExceptionally( final ChannelHandlerContext context, final Throwable throwable ) { reportIssueUnless(!this.closingExceptionally, context, "", throwable); this.closingExceptionally = true; if (this.pendingWrites != null && !this.pendingWrites.isEmpty()) { this.pendingWrites.releaseAndFailAll(context, throwable); } if (this.pendingRequests.isEmpty()) { return; } if (!this.contextRequestFuture.isDone()) { this.contextRequestFuture.completeExceptionally(throwable); } if (!this.contextFuture.isDone()) { this.contextFuture.completeExceptionally(throwable); } final int count = this.pendingRequests.size(); Exception contextRequestException = null; String phrase = null; if (this.contextRequestFuture.isCompletedExceptionally()) { try { this.contextRequestFuture.get(); } catch (final CancellationException error) { phrase = "RNTBD context request write cancelled"; contextRequestException = error; } catch (final Exception error) { phrase = "RNTBD context request write failed"; contextRequestException = error; } catch (final Throwable error) { phrase 
= "RNTBD context request write failed"; contextRequestException = new ChannelException(error); } } else if (this.contextFuture.isCompletedExceptionally()) { try { this.contextFuture.get(); } catch (final CancellationException error) { phrase = "RNTBD context request read cancelled"; contextRequestException = error; } catch (final Exception error) { phrase = "RNTBD context request read failed"; contextRequestException = error; } catch (final Throwable error) { phrase = "RNTBD context request read failed"; contextRequestException = new ChannelException(error); } } else { phrase = "closed exceptionally"; } final String message = lenientFormat("%s %s with %s pending requests", context, phrase, count); final Exception cause; if (throwable instanceof ClosedChannelException) { cause = contextRequestException == null ? (ClosedChannelException) throwable : contextRequestException; } else { cause = throwable instanceof Exception ? (Exception) throwable : new ChannelException(throwable); } for (RntbdRequestRecord record : this.pendingRequests.values()) { final Map<String, String> requestHeaders = record.args().serviceRequest().getHeaders(); final String requestUri = record.args().physicalAddress().toString(); final GoneException error = new GoneException(message, cause, null, requestUri); BridgeInternal.setRequestHeaders(error, requestHeaders); record.completeExceptionally(error); } } /** * This method is called for each incoming message of type {@link RntbdResponse} to complete a request. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager request manager} belongs. * @param response the {@link RntbdResponse message} received. 
*/ private void removeContextNegotiatorAndFlushPendingWrites(final ChannelHandlerContext context) { final RntbdContextNegotiator negotiator = context.pipeline().get(RntbdContextNegotiator.class); negotiator.removeInboundHandler(); negotiator.removeOutboundHandler(); if (!this.pendingWrites.isEmpty()) { this.pendingWrites.writeAndRemoveAll(context); context.flush(); } } private static void reportIssue(final Object subject, final String format, final Object... args) { RntbdReporter.reportIssue(logger, subject, format, args); } private static void reportIssueUnless( final boolean predicate, final Object subject, final String format, final Object... args ) { RntbdReporter.reportIssueUnless(logger, predicate, subject, format, args); } private void traceOperation(final ChannelHandlerContext context, final String operationName, final Object... args) { logger.debug("{}\n{}\n{}", operationName, context, args); } private static final class UnhealthyChannelException extends ChannelException { static final UnhealthyChannelException INSTANCE = new UnhealthyChannelException(); private UnhealthyChannelException() { super("health check failed"); } @Override public Throwable fillInStackTrace() { return this; } } }
class RntbdRequestManager implements ChannelHandler, ChannelInboundHandler, ChannelOutboundHandler { private static final ClosedChannelException ON_CHANNEL_UNREGISTERED = ThrowableUtil.unknownStackTrace(new ClosedChannelException(), RntbdRequestManager.class, "channelUnregistered"); private static final ClosedChannelException ON_CLOSE = ThrowableUtil.unknownStackTrace(new ClosedChannelException(), RntbdRequestManager.class, "close"); private static final ClosedChannelException ON_DEREGISTER = ThrowableUtil.unknownStackTrace(new ClosedChannelException(), RntbdRequestManager.class, "deregister"); private static final EventExecutor requestExpirationExecutor = new DefaultEventExecutor(new RntbdThreadFactory( "request-expirator", true, Thread.NORM_PRIORITY)); private static final Logger logger = LoggerFactory.getLogger(RntbdRequestManager.class); private final CompletableFuture<RntbdContext> contextFuture = new CompletableFuture<>(); private final CompletableFuture<RntbdContextRequest> contextRequestFuture = new CompletableFuture<>(); private final ChannelHealthChecker healthChecker; private final int pendingRequestLimit; private final ConcurrentHashMap<Long, RntbdRequestRecord> pendingRequests; private final Timestamps timestamps = new Timestamps(); private boolean closingExceptionally = false; private CoalescingBufferQueue pendingWrites; public RntbdRequestManager(final ChannelHealthChecker healthChecker, final int pendingRequestLimit) { checkArgument(pendingRequestLimit > 0, "pendingRequestLimit: %s", pendingRequestLimit); checkNotNull(healthChecker, "healthChecker"); this.pendingRequests = new ConcurrentHashMap<>(pendingRequestLimit); this.pendingRequestLimit = pendingRequestLimit; this.healthChecker = healthChecker; } /** * Gets called after the {@link ChannelHandler} was added to the actual context and it's ready to handle events. 
* * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void handlerAdded(final ChannelHandlerContext context) { this.traceOperation(context, "handlerAdded"); } /** * Gets called after the {@link ChannelHandler} was removed from the actual context and it doesn't handle events * anymore. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void handlerRemoved(final ChannelHandlerContext context) { this.traceOperation(context, "handlerRemoved"); } /** * The {@link Channel} of the {@link ChannelHandlerContext} is now active * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelActive(final ChannelHandlerContext context) { this.traceOperation(context, "channelActive"); context.fireChannelActive(); } /** * The {@link Channel} of the {@link ChannelHandlerContext} was registered and has reached the end of its lifetime * <p> * This method will only be called after the channel is closed. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelInactive(final ChannelHandlerContext context) { this.traceOperation(context, "channelInactive"); context.fireChannelInactive(); } /** * The {@link Channel} of the {@link ChannelHandlerContext} has read a message from its peer. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs. * @param message The message read. 
*/ @Override public void channelRead(final ChannelHandlerContext context, final Object message) { this.traceOperation(context, "channelRead"); try { if (message.getClass() == RntbdResponse.class) { try { this.messageReceived(context, (RntbdResponse) message); } catch (CorruptedFrameException error) { this.exceptionCaught(context, error); } catch (Throwable throwable) { reportIssue(context, "{} ", message, throwable); this.exceptionCaught(context, throwable); } } else { final IllegalStateException error = new IllegalStateException( lenientFormat("expected message of %s, not %s: %s", RntbdResponse.class, message.getClass(), message)); reportIssue(context, "", error); this.exceptionCaught(context, error); } } finally { if (message instanceof ReferenceCounted) { boolean released = ((ReferenceCounted) message).release(); reportIssueUnless(released, context, "failed to release message: {}", message); } } } /** * The {@link Channel} of the {@link ChannelHandlerContext} has fully consumed the most-recent message read. * <p> * If {@link ChannelOption * {@link Channel} will be made until {@link ChannelHandlerContext * for outbound messages to be written. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelReadComplete(final ChannelHandlerContext context) { this.traceOperation(context, "channelReadComplete"); this.timestamps.channelReadCompleted(); context.fireChannelReadComplete(); } /** * Constructs a {@link CoalescingBufferQueue} for buffering encoded requests until we have an {@link RntbdRequest} * <p> * This method then calls {@link ChannelHandlerContext * {@link ChannelInboundHandler} in the {@link ChannelPipeline}. * <p> * Sub-classes may override this method to change behavior. 
* * @param context the {@link ChannelHandlerContext} for which the bind operation is made */ @Override public void channelRegistered(final ChannelHandlerContext context) { this.traceOperation(context, "channelRegistered"); reportIssueUnless(this.pendingWrites == null, context, "pendingWrites: {}", pendingWrites); this.pendingWrites = new CoalescingBufferQueue(context.channel()); context.fireChannelRegistered(); } /** * The {@link Channel} of the {@link ChannelHandlerContext} was unregistered from its {@link EventLoop} * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelUnregistered(final ChannelHandlerContext context) { this.traceOperation(context, "channelUnregistered"); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, ON_CHANNEL_UNREGISTERED); } else { logger.debug("{} channelUnregistered exceptionally", context); } context.fireChannelUnregistered(); } /** * Gets called once the writable state of a {@link Channel} changed. 
You can check the state with * {@link Channel * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs */ @Override public void channelWritabilityChanged(final ChannelHandlerContext context) { this.traceOperation(context, "channelWritabilityChanged"); context.fireChannelWritabilityChanged(); } /** * Processes {@link ChannelHandlerContext * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs * @param cause Exception caught */ @Override @SuppressWarnings("deprecation") public void exceptionCaught(final ChannelHandlerContext context, final Throwable cause) { this.traceOperation(context, "exceptionCaught", cause); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, cause); logger.debug("{} closing due to:", context, cause); context.flush().close(); } } /** * Processes inbound events triggered by channel handlers in the {@link RntbdClientChannelHandler} pipeline * <p> * All but inbound request management events are ignored. 
* * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager} belongs * @param event An object representing a user event */ @Override public void userEventTriggered(final ChannelHandlerContext context, final Object event) { this.traceOperation(context, "userEventTriggered", event); try { if (event instanceof IdleStateEvent) { this.healthChecker.isHealthy(context.channel()).addListener((Future<Boolean> future) -> { final Throwable cause; if (future.isSuccess()) { if (future.get()) { return; } cause = UnhealthyChannelException.INSTANCE; } else { cause = future.cause(); } this.exceptionCaught(context, cause); }); return; } if (event instanceof RntbdContext) { this.contextFuture.complete((RntbdContext) event); this.removeContextNegotiatorAndFlushPendingWrites(context); return; } if (event instanceof RntbdContextException) { this.contextFuture.completeExceptionally((RntbdContextException) event); context.pipeline().flush().close(); return; } context.fireUserEventTriggered(event); } catch (Throwable error) { reportIssue(context, "{}: ", event, error); this.exceptionCaught(context, error); } } /** * Called once a bind operation is made. * * @param context the {@link ChannelHandlerContext} for which the bind operation is made * @param localAddress the {@link SocketAddress} to which it should bound * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void bind(final ChannelHandlerContext context, final SocketAddress localAddress, final ChannelPromise promise) { this.traceOperation(context, "bind", localAddress); context.bind(localAddress, promise); } /** * Called once a close operation is made. 
* * @param context the {@link ChannelHandlerContext} for which the close operation is made * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void close(final ChannelHandlerContext context, final ChannelPromise promise) { this.traceOperation(context, "close"); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, ON_CLOSE); } else { logger.debug("{} closed exceptionally", context); } final SslHandler sslHandler = context.pipeline().get(SslHandler.class); if (sslHandler != null) { try { sslHandler.closeOutbound(); } catch (Exception exception) { if (exception instanceof SSLException) { logger.debug( "SslException when attempting to close the outbound SSL connection: ", exception); } else { logger.warn( "Exception when attempting to close the outbound SSL connection: ", exception); throw exception; } } } context.close(promise); } /** * Called once a connect operation is made. * * @param context the {@link ChannelHandlerContext} for which the connect operation is made * @param remoteAddress the {@link SocketAddress} to which it should connect * @param localAddress the {@link SocketAddress} which is used as source on connect * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void connect( final ChannelHandlerContext context, final SocketAddress remoteAddress, final SocketAddress localAddress, final ChannelPromise promise ) { this.traceOperation(context, "connect", remoteAddress, localAddress); context.connect(remoteAddress, localAddress, promise); } /** * Called once a deregister operation is made from the current registered {@link EventLoop}. 
* * @param context the {@link ChannelHandlerContext} for which the deregister operation is made * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void deregister(final ChannelHandlerContext context, final ChannelPromise promise) { this.traceOperation(context, "deregister"); if (!this.closingExceptionally) { this.completeAllPendingRequestsExceptionally(context, ON_DEREGISTER); } else { logger.debug("{} deregistered exceptionally", context); } context.deregister(promise); } /** * Called once a disconnect operation is made. * * @param context the {@link ChannelHandlerContext} for which the disconnect operation is made * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void disconnect(final ChannelHandlerContext context, final ChannelPromise promise) { this.traceOperation(context, "disconnect"); context.disconnect(promise); } /** * Called once a flush operation is made * <p> * The flush operation will try to flush out all previous written messages that are pending. * * @param context the {@link ChannelHandlerContext} for which the flush operation is made */ @Override public void flush(final ChannelHandlerContext context) { this.traceOperation(context, "flush"); context.flush(); } /** * Intercepts {@link ChannelHandlerContext * * @param context the {@link ChannelHandlerContext} for which the read operation is made */ @Override public void read(final ChannelHandlerContext context) { this.traceOperation(context, "read"); context.read(); } /** * Called once a write operation is made * <p> * The write operation will send messages through the {@link ChannelPipeline} which are then ready to be flushed * to the actual {@link Channel}. 
This will occur when {@link Channel * * @param context the {@link ChannelHandlerContext} for which the write operation is made * @param message the message to write * @param promise the {@link ChannelPromise} to notify once the operation completes */ @Override public void write(final ChannelHandlerContext context, final Object message, final ChannelPromise promise) { this.traceOperation(context, "write", message); if (message instanceof RntbdRequestRecord) { final RntbdRequestRecord record = (RntbdRequestRecord) message; this.timestamps.channelWriteAttempted(); record.setSendingRequestHasStarted(); context.write(this.addPendingRequestRecord(context, record), promise).addListener(completed -> { record.stage(RntbdRequestRecord.Stage.SENT); if (completed.isSuccess()) { this.timestamps.channelWriteCompleted(); } }); return; } if (message == RntbdHealthCheckRequest.MESSAGE) { context.write(RntbdHealthCheckRequest.MESSAGE, promise).addListener(completed -> { if (completed.isSuccess()) { this.timestamps.channelPingCompleted(); } }); return; } final IllegalStateException error = new IllegalStateException(lenientFormat("message of %s: %s", message.getClass(), message)); reportIssue(context, "", error); this.exceptionCaught(context, error); } int pendingRequestCount() { return this.pendingRequests.size(); } Optional<RntbdContext> rntbdContext() { return Optional.of(this.contextFuture.getNow(null)); } CompletableFuture<RntbdContextRequest> rntbdContextRequestFuture() { return this.contextRequestFuture; } boolean hasRequestedRntbdContext() { return this.contextRequestFuture.getNow(null) != null; } boolean hasRntbdContext() { return this.contextFuture.getNow(null) != null; } RntbdChannelState getChannelState(final int demand) { reportIssueUnless(this.hasRequestedRntbdContext(), this, "Direct TCP context request was not issued"); final int limit = this.hasRntbdContext() ? 
this.pendingRequestLimit : Math.min(this.pendingRequestLimit, demand); if (this.pendingRequests.size() < limit) { return RntbdChannelState.ok(this.pendingRequests.size()); } if (this.hasRntbdContext()) { return RntbdChannelState.pendingLimit(this.pendingRequests.size()); } else { return RntbdChannelState.contextNegotiationPending((this.pendingRequests.size())); } } void pendWrite(final ByteBuf out, final ChannelPromise promise) { this.pendingWrites.add(out, promise); } Timestamps snapshotTimestamps() { return new Timestamps(this.timestamps); } private RntbdRequestRecord addPendingRequestRecord(final ChannelHandlerContext context, final RntbdRequestRecord record) { return this.pendingRequests.compute(record.transportRequestId(), (id, current) -> { reportIssueUnless(current == null, context, "id: {}, current: {}, request: {}", record); record.pendingRequestQueueSize(pendingRequests.size()); final Timeout pendingRequestTimeout = record.newTimeout(timeout -> { requestExpirationExecutor.execute(record::expire); }); record.whenComplete((response, error) -> { this.pendingRequests.remove(id); pendingRequestTimeout.cancel(); }); return record; }); } private void completeAllPendingRequestsExceptionally( final ChannelHandlerContext context, final Throwable throwable ) { reportIssueUnless(!this.closingExceptionally, context, "", throwable); this.closingExceptionally = true; if (this.pendingWrites != null && !this.pendingWrites.isEmpty()) { this.pendingWrites.releaseAndFailAll(context, throwable); } if (this.pendingRequests.isEmpty()) { return; } if (!this.contextRequestFuture.isDone()) { this.contextRequestFuture.completeExceptionally(throwable); } if (!this.contextFuture.isDone()) { this.contextFuture.completeExceptionally(throwable); } final int count = this.pendingRequests.size(); Exception contextRequestException = null; String phrase = null; if (this.contextRequestFuture.isCompletedExceptionally()) { try { this.contextRequestFuture.get(); } catch (final 
CancellationException error) { phrase = "RNTBD context request write cancelled"; contextRequestException = error; } catch (final Exception error) { phrase = "RNTBD context request write failed"; contextRequestException = error; } catch (final Throwable error) { phrase = "RNTBD context request write failed"; contextRequestException = new ChannelException(error); } } else if (this.contextFuture.isCompletedExceptionally()) { try { this.contextFuture.get(); } catch (final CancellationException error) { phrase = "RNTBD context request read cancelled"; contextRequestException = error; } catch (final Exception error) { phrase = "RNTBD context request read failed"; contextRequestException = error; } catch (final Throwable error) { phrase = "RNTBD context request read failed"; contextRequestException = new ChannelException(error); } } else { phrase = "closed exceptionally"; } final String message = lenientFormat("%s %s with %s pending requests", context, phrase, count); final Exception cause; if (throwable instanceof ClosedChannelException) { cause = contextRequestException == null ? (ClosedChannelException) throwable : contextRequestException; } else { cause = throwable instanceof Exception ? (Exception) throwable : new ChannelException(throwable); } for (RntbdRequestRecord record : this.pendingRequests.values()) { final Map<String, String> requestHeaders = record.args().serviceRequest().getHeaders(); final String requestUri = record.args().physicalAddress().toString(); final GoneException error = new GoneException(message, cause, null, requestUri); BridgeInternal.setRequestHeaders(error, requestHeaders); record.completeExceptionally(error); } } /** * This method is called for each incoming message of type {@link RntbdResponse} to complete a request. * * @param context {@link ChannelHandlerContext} to which this {@link RntbdRequestManager request manager} belongs. * @param response the {@link RntbdResponse message} received. 
*/
private void removeContextNegotiatorAndFlushPendingWrites(final ChannelHandlerContext context) {
    // Context negotiation is complete: remove both negotiator handlers from the pipeline.
    final RntbdContextNegotiator negotiator = context.pipeline().get(RntbdContextNegotiator.class);
    negotiator.removeInboundHandler();
    negotiator.removeOutboundHandler();
    // Flush any requests that were buffered while the RNTBD context was being negotiated.
    if (!this.pendingWrites.isEmpty()) {
        this.pendingWrites.writeAndRemoveAll(context);
        context.flush();
    }
}

// Reports an unexpected condition through the shared RNTBD reporter.
private static void reportIssue(final Object subject, final String format, final Object... args) {
    RntbdReporter.reportIssue(logger, subject, format, args);
}

// Reports an issue only when the given predicate is false.
private static void reportIssueUnless(
    final boolean predicate, final Object subject, final String format, final Object... args
) {
    RntbdReporter.reportIssueUnless(logger, predicate, subject, format, args);
}

// TRACE-level helper invoked on every channel operation; kept at the lowest log level
// so routine traffic does not flood debug logs.
private void traceOperation(final ChannelHandlerContext context, final String operationName, final Object... args) {
    logger.trace("{}\n{}\n{}", operationName, context, args);
}

// Shared singleton raised when a channel health check fails; the stack trace is
// suppressed because the instance is a constant and carries no call-site context.
private static final class UnhealthyChannelException extends ChannelException {

    static final UnhealthyChannelException INSTANCE = new UnhealthyChannelException();

    private UnhealthyChannelException() {
        super("health check failed");
    }

    @Override
    public Throwable fillInStackTrace() {
        // Filling in a stack trace for a shared constant would be misleading and costly.
        return this;
    }
}
}
This function is only called from line 80 — is it intended solely for debugging?
/**
 * Prefixes every line of {@code code} with its 1-based line number and a space,
 * so error messages can point at the failing line.
 *
 * @param code source text to annotate; split on '\n'
 * @return the numbered text; every line (including the last) ends with '\n'
 */
private static String addLineNumber(String code) {
    StringBuilder numbered = new StringBuilder();
    int lineNo = 1;
    for (String line : code.split("\n")) {
        numbered.append(lineNo++).append(" ").append(line).append("\n");
    }
    return numbered.toString();
}
builder.append(i + 1).append(" ").append(lines[i]).append("\n");
/**
 * Prefixes every line of {@code code} with its 1-based line number followed by a space.
 * Used so that error output can point at the failing line (numbering starts at 1).
 *
 * @param code source text to annotate; split on '\n'
 * @return the numbered text; every line (including the last) ends with '\n'
 */
private static String addLineNumber(String code) {
    String[] lines = code.split("\n");
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < lines.length; i++) {
        builder.append(i + 1).append(" ").append(lines[i]).append("\n");
    }
    return builder.toString();
}
class " + name, e); } }
class " + name, e); } } /** * Outputs additional information when an error occurs. * In general, when cooking fails, this shows which line is wrong. Line numbers start at 1. */
Shall we convert the static collections to instance collections?
/**
 * Discovers {@code LSCodeActionProvider} implementations via SPI and indexes them
 * into the holder's node-based and diagnostics-based collections. Runs only once:
 * a non-empty node-based map means loading has already happened.
 */
private void loadServices() {
    if (!CodeActionProvidersHolder.nodeBasedProviders.isEmpty()) {
        return; // already initialized
    }
    // Seed every node type with an empty provider list up front.
    for (CodeActionNodeType type : CodeActionNodeType.values()) {
        CodeActionProvidersHolder.nodeBasedProviders.put(type, new ArrayList<>());
    }
    for (LSCodeActionProvider loaded : ServiceLoader.load(LSCodeActionProvider.class)) {
        if (loaded == null) {
            continue;
        }
        // A provider may support node-based actions, diagnostics-based actions, or both.
        if (loaded.isNodeBasedSupported()) {
            for (CodeActionNodeType type : loaded.getCodeActionNodeTypes()) {
                CodeActionProvidersHolder.nodeBasedProviders.get(type).add(loaded);
            }
        }
        if (loaded.isDiagBasedSupported()) {
            CodeActionProvidersHolder.diagnosticsBasedProviders.add(loaded);
        }
    }
}
CodeActionProvidersHolder.nodeBasedProviders.put(nodeType, new ArrayList<>());
/**
 * Discovers {@code LSCodeActionProvider} implementations via SPI and indexes them
 * into the holder's node-based and diagnostics-based collections.
 */
private void loadServices() {
    // Providers are loaded only once; a non-empty map means loading already happened.
    if (!CodeActionProvidersHolder.nodeBasedProviders.isEmpty()) {
        return;
    }
    ServiceLoader<LSCodeActionProvider> serviceLoader = ServiceLoader.load(LSCodeActionProvider.class);
    // Seed every node type with an empty provider list up front.
    for (CodeActionNodeType nodeType : CodeActionNodeType.values()) {
        CodeActionProvidersHolder.nodeBasedProviders.put(nodeType, new ArrayList<>());
    }
    for (LSCodeActionProvider provider : serviceLoader) {
        if (provider == null) {
            continue;
        }
        // A provider may support node-based actions, diagnostics-based actions, or both.
        if (provider.isNodeBasedSupported()) {
            for (CodeActionNodeType nodeType : provider.getCodeActionNodeTypes()) {
                CodeActionProvidersHolder.nodeBasedProviders.get(nodeType).add(provider);
            }
        }
        if (provider.isDiagBasedSupported()) {
            CodeActionProvidersHolder.diagnosticsBasedProviders.add(provider);
        }
    }
}
// Holds code action providers discovered for a language server context.
// One instance is cached per LanguageServerContext under CODE_ACTION_PROVIDERS_HOLDER_KEY.
class CodeActionProvidersHolder {
    // Node type -> providers able to act on that node type; populated by loadServices().
    private static final Map<CodeActionNodeType, List<LSCodeActionProvider>> nodeBasedProviders = new HashMap<>();
    // Providers that act on diagnostics rather than on a specific node type.
    private static final List<LSCodeActionProvider> diagnosticsBasedProviders = new ArrayList<>();
    private static final LanguageServerContext.Key<CodeActionProvidersHolder> CODE_ACTION_PROVIDERS_HOLDER_KEY =
            new LanguageServerContext.Key<>();

    /**
     * Returns the instance of Holder cached in the given context, creating it on first use.
     *
     * @param serverContext language server context used to cache the holder
     * @return code action provider holder instance
     */
    public static CodeActionProvidersHolder getInstance(LanguageServerContext serverContext) {
        CodeActionProvidersHolder codeActionProvidersHolder = serverContext.get(CODE_ACTION_PROVIDERS_HOLDER_KEY);
        if (codeActionProvidersHolder == null) {
            // First request for this context: the constructor registers itself and loads providers.
            codeActionProvidersHolder = new CodeActionProvidersHolder(serverContext);
        }
        return codeActionProvidersHolder;
    }

    private CodeActionProvidersHolder(LanguageServerContext serverContext) {
        // Register in the context before loading so subsequent lookups find this instance.
        serverContext.put(CODE_ACTION_PROVIDERS_HOLDER_KEY, this);
        loadServices();
    }

    /**
     * Returns active node based providers for this node type, filtered by enablement
     * and sorted by ascending priority.
     *
     * @param nodeType node type
     * @param ctx code action context supplying the language server context for the enablement check
     * @return node based providers
     */
    List<LSCodeActionProvider> getActiveNodeBasedProviders(CodeActionNodeType nodeType, CodeActionContext ctx) {
        if (CodeActionProvidersHolder.nodeBasedProviders.containsKey(nodeType)) {
            return CodeActionProvidersHolder.nodeBasedProviders.get(nodeType).stream()
                    .filter(provider -> provider.isEnabled(ctx.languageServercontext()))
                    .sorted(Comparator.comparingInt(LSCodeActionProvider::priority))
                    .collect(Collectors.toList());
        }
        return Collections.emptyList();
    }

    /**
     * Returns active diagnostic based providers, filtered by enablement and sorted
     * by ascending priority.
     *
     * @param ctx code action context supplying the language server context for the enablement check
     * @return diagnostic based providers
     */
    List<LSCodeActionProvider> getActiveDiagnosticsBasedProviders(CodeActionContext ctx) {
        return CodeActionProvidersHolder.diagnosticsBasedProviders.stream()
                .filter(provider -> provider.isEnabled(ctx.languageServercontext()))
                .sorted(Comparator.comparingInt(LSCodeActionProvider::priority))
                .collect(Collectors.toList());
    }
}
/**
 * Holds the node-based and diagnostics-based code action providers discovered for a
 * language server context. One holder is cached per {@code LanguageServerContext}.
 */
class CodeActionProvidersHolder {
    // Node type -> providers able to act on that node type; populated by loadServices().
    private static final Map<CodeActionNodeType, List<LSCodeActionProvider>> nodeBasedProviders = new HashMap<>();
    // Providers that act on diagnostics rather than on a specific node type.
    private static final List<LSCodeActionProvider> diagnosticsBasedProviders = new ArrayList<>();
    private static final LanguageServerContext.Key<CodeActionProvidersHolder> CODE_ACTION_PROVIDERS_HOLDER_KEY =
            new LanguageServerContext.Key<>();

    /**
     * Returns the holder cached in the given context, creating and caching it on first use.
     *
     * @param serverContext language server context used to cache the holder
     * @return code action provider holder instance
     */
    public static CodeActionProvidersHolder getInstance(LanguageServerContext serverContext) {
        CodeActionProvidersHolder holder = serverContext.get(CODE_ACTION_PROVIDERS_HOLDER_KEY);
        return holder != null ? holder : new CodeActionProvidersHolder(serverContext);
    }

    private CodeActionProvidersHolder(LanguageServerContext serverContext) {
        // Register in the context before loading so subsequent lookups find this instance.
        serverContext.put(CODE_ACTION_PROVIDERS_HOLDER_KEY, this);
        loadServices();
    }

    /**
     * Returns the enabled node-based providers for the given node type, ordered by
     * ascending priority; empty when the node type is unknown.
     *
     * @param nodeType node type
     * @param ctx code action context supplying the language server context for the enablement check
     * @return node based providers
     */
    List<LSCodeActionProvider> getActiveNodeBasedProviders(CodeActionNodeType nodeType, CodeActionContext ctx) {
        List<LSCodeActionProvider> candidates = CodeActionProvidersHolder.nodeBasedProviders.get(nodeType);
        if (candidates == null) {
            return Collections.emptyList();
        }
        return candidates.stream()
                .filter(candidate -> candidate.isEnabled(ctx.languageServercontext()))
                .sorted(Comparator.comparingInt(LSCodeActionProvider::priority))
                .collect(Collectors.toList());
    }

    /**
     * Returns the enabled diagnostics-based providers, ordered by ascending priority.
     *
     * @param ctx code action context supplying the language server context for the enablement check
     * @return diagnostic based providers
     */
    List<LSCodeActionProvider> getActiveDiagnosticsBasedProviders(CodeActionContext ctx) {
        return CodeActionProvidersHolder.diagnosticsBasedProviders.stream()
                .filter(candidate -> candidate.isEnabled(ctx.languageServercontext()))
                .sorted(Comparator.comparingInt(LSCodeActionProvider::priority))
                .collect(Collectors.toList());
    }
}
Currently what happens is the following: If the user doesn't specify anything, the default is used. If the user does specify something that is not resolvable from the classpath root, we print a warning message. If it is resolvable, we show it and also show the "Powered by..." text
/**
 * Reads the configured banner file from the classpath.
 * <p>
 * For a user-supplied (non-default) banner, a "Powered by Quarkus" attribution line is
 * appended. The banner is purely cosmetic, so any failure — whether the resource cannot
 * be located or cannot be read — logs a warning and yields an empty banner rather than
 * failing application startup.
 *
 * @param config banner configuration carrying the classpath-relative banner path
 * @return the banner text (with trailing newline), or {@code ""} when unavailable
 */
private String readBannerFile(BannerConfig config) {
    URL resource = Thread.currentThread().getContextClassLoader().getResource(config.path);
    if (resource == null) {
        // FIX: message said "read" although the resource was never located.
        logger.warn("Could not locate banner file");
        return "";
    }
    try (InputStream is = resource.openStream()) {
        byte[] content = FileUtil.readFileContents(is);
        StringBuilder bannerTitle = new StringBuilder(new String(content, StandardCharsets.UTF_8));
        bannerTitle.append('\n');
        // Only custom banners get the attribution line; the default banner already carries it.
        if (!config.isDefaultPath()) {
            bannerTitle.append("Powered by Quarkus v").append(Version.getVersion()).append('\n');
        }
        return bannerTitle.toString();
    } catch (IOException e) {
        // FIX: previously rethrown as UncheckedIOException, which could abort startup
        // over a cosmetic feature; degrade gracefully instead.
        logger.warn("Unable to read banner file");
        return "";
    }
}
if (!config.isDefaultPath()) {
/**
 * Reads the banner resolved by {@code getBanner(config)} and, for non-default banners,
 * appends a right-aligned "Powered by Quarkus" tagline padded to the banner's widest line.
 * The banner is cosmetic, so every failure path logs a warning and returns {@code ""}.
 *
 * @param config banner configuration
 * @return the banner text plus tagline, or {@code ""} when the banner is unavailable
 */
private String readBannerFile(BannerConfig config) {
    try {
        Map.Entry<URL, Boolean> entry = getBanner(config);
        URL bannerResourceURL = entry.getKey();
        if (bannerResourceURL == null) {
            logger.warn("Could not locate banner file");
            return "";
        }
        try (InputStream is = bannerResourceURL.openStream()) {
            String bannerTitle = new String(FileUtil.readFileContents(is), StandardCharsets.UTF_8);
            // Measure the widest line so the tagline can be right-aligned underneath it.
            int width = 0;
            for (Scanner scanner = new Scanner(bannerTitle); scanner.hasNextLine(); ) {
                width = Math.max(width, scanner.nextLine().length());
            }
            Boolean isDefaultBanner = entry.getValue();
            String tagline = "\n";
            if (!isDefaultBanner) {
                tagline = String.format("\n%" + width + "s\n",
                        "Powered by Quarkus v" + Version.getVersion());
            }
            return bannerTitle + tagline;
        }
    } catch (IOException e) {
        logger.warn("Unable to read banner file");
        return "";
    }
}
class }) @Record(ExecutionTime.RUNTIME_INIT) public ConsoleFormatterBannerBuildItem recordBanner(BannerRecorder recorder, BannerConfig config, BannerRuntimeConfig bannerRuntimeConfig) { String bannerText = readBannerFile(config); return new ConsoleFormatterBannerBuildItem(recorder.provideBannerSupplier(bannerText, bannerRuntimeConfig)); }
class }) @Record(ExecutionTime.RUNTIME_INIT) public ConsoleFormatterBannerBuildItem recordBanner(BannerRecorder recorder, BannerConfig config, BannerRuntimeConfig bannerRuntimeConfig) { String bannerText = readBannerFile(config); return new ConsoleFormatterBannerBuildItem(recorder.provideBannerSupplier(bannerText, bannerRuntimeConfig)); }
Nit: also remove the `public` modifier from `testJoinWithFilter()` below. By the way, we may want to make the convention of omitting `public` on test methods more conspicuous.
/**
 * Verifies the JSON plan for a left outer lateral join whose join condition is the
 * literal {@code ON TRUE}.
 */
void testLeftOuterJoinWithLiteralTrue() {
    // Register the sink that the INSERT below writes into.
    String sinkDdl = "CREATE TABLE MySink (\n a varchar,\n b varchar\n) with (\n"
            + " 'connector' = 'values',\n 'table-sink-class' = 'DEFAULT')";
    tEnv.executeSql(sinkDdl);
    util.addTemporarySystemFunction("func1", TableFunc1.class);
    String query =
            "insert into MySink SELECT c, s FROM MyTable LEFT JOIN LATERAL TABLE(func1(c)) AS T(s) ON TRUE";
    util.verifyJsonPlan(query);
}
+ " a varchar,\n"
/**
 * Verifies the JSON plan for a left outer lateral join whose join condition is the
 * literal {@code ON TRUE}.
 */
void testLeftOuterJoinWithLiteralTrue() {
    // Register the sink that the INSERT below writes into.
    String sinkTableDdl =
            "CREATE TABLE MySink (\n"
                    + " a varchar,\n"
                    + " b varchar\n"
                    + ") with (\n"
                    + " 'connector' = 'values',\n"
                    + " 'table-sink-class' = 'DEFAULT')";
    tEnv.executeSql(sinkTableDdl);
    util.addTemporarySystemFunction("func1", TableFunc1.class);
    String sqlQuery =
            "insert into MySink SELECT c, s FROM MyTable LEFT JOIN LATERAL TABLE(func1(c)) AS T(s) ON TRUE";
    util.verifyJsonPlan(sqlQuery);
}
class CorrelateJsonPlanTest extends TableTestBase { private StreamTableTestUtil util; private TableEnvironment tEnv; @BeforeEach void setup() { util = streamTestUtil(TableConfig.getDefault()); tEnv = util.getTableEnv(); String srcTableDdl = "CREATE TABLE MyTable (\n" + " a bigint,\n" + " b int not null,\n" + " c varchar,\n" + " d timestamp(3)\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'false')"; tEnv.executeSql(srcTableDdl); } @Test void testCrossJoin() { String sinkTableDdl = "CREATE TABLE MySink (\n" + " a varchar,\n" + " b varchar\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'table-sink-class' = 'DEFAULT')"; tEnv.executeSql(sinkTableDdl); util.addTemporarySystemFunction("func1", TableFunc1.class); String sqlQuery = "insert into MySink SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)"; util.verifyJsonPlan(sqlQuery); } @Test @Disabled("the case is ignored because of FLINK-21870") void testRegisterByClass() { String sinkTableDdl = "CREATE TABLE MySink (\n" + " a varchar,\n" + " b varchar\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'table-sink-class' = 'DEFAULT')"; tEnv.executeSql(sinkTableDdl); tEnv.createTemporaryFunction("func1", TableFunc1.class); String sqlQuery = "insert into MySink SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)"; util.verifyJsonPlan(sqlQuery); } @Test void testCrossJoinOverrideParameters() { String sinkTableDdl = "CREATE TABLE MySink (\n" + " a varchar,\n" + " b varchar\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'table-sink-class' = 'DEFAULT')"; tEnv.executeSql(sinkTableDdl); util.addTemporarySystemFunction("func1", TableFunc1.class); String sqlQuery = "insert into MySink SELECT c, s FROM MyTable, LATERAL TABLE(func1(c, '$')) AS T(s)"; util.verifyJsonPlan(sqlQuery); } @Test @Test public void testJoinWithFilter() { String sinkTableDdl = "CREATE TABLE MySink (\n" + " a varchar,\n" + " b varchar\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'table-sink-class' = 
'DEFAULT')"; tEnv.executeSql(sinkTableDdl); util.addTemporarySystemFunction("func1", TableFunc1.class); String sqlQuery = "insert into MySink " + "select * from (SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)) as T2 where c = s"; util.verifyJsonPlan(sqlQuery); } }
/**
 * JSON-plan tests for correlate (lateral table function) queries.
 *
 * <p>Fixed: {@code testJoinWithFilter} carried a duplicated {@code @Test} annotation;
 * {@code @Test} is not repeatable, so the duplicate does not compile.
 */
class CorrelateJsonPlanTest extends TableTestBase {
    private StreamTableTestUtil util;
    private TableEnvironment tEnv;

    @BeforeEach
    void setup() {
        util = streamTestUtil(TableConfig.getDefault());
        tEnv = util.getTableEnv();
        // Unbounded source table shared by every test case.
        String srcTableDdl =
                "CREATE TABLE MyTable (\n"
                        + " a bigint,\n"
                        + " b int not null,\n"
                        + " c varchar,\n"
                        + " d timestamp(3)\n"
                        + ") with (\n"
                        + " 'connector' = 'values',\n"
                        + " 'bounded' = 'false')";
        tEnv.executeSql(srcTableDdl);
    }

    @Test
    void testCrossJoin() {
        String sinkTableDdl =
                "CREATE TABLE MySink (\n"
                        + " a varchar,\n"
                        + " b varchar\n"
                        + ") with (\n"
                        + " 'connector' = 'values',\n"
                        + " 'table-sink-class' = 'DEFAULT')";
        tEnv.executeSql(sinkTableDdl);
        util.addTemporarySystemFunction("func1", TableFunc1.class);
        String sqlQuery =
                "insert into MySink SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)";
        util.verifyJsonPlan(sqlQuery);
    }

    @Test
    @Disabled("the case is ignored because of FLINK-21870")
    void testRegisterByClass() {
        String sinkTableDdl =
                "CREATE TABLE MySink (\n"
                        + " a varchar,\n"
                        + " b varchar\n"
                        + ") with (\n"
                        + " 'connector' = 'values',\n"
                        + " 'table-sink-class' = 'DEFAULT')";
        tEnv.executeSql(sinkTableDdl);
        // Registers the function by class rather than via the test util.
        tEnv.createTemporaryFunction("func1", TableFunc1.class);
        String sqlQuery =
                "insert into MySink SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)";
        util.verifyJsonPlan(sqlQuery);
    }

    @Test
    void testCrossJoinOverrideParameters() {
        String sinkTableDdl =
                "CREATE TABLE MySink (\n"
                        + " a varchar,\n"
                        + " b varchar\n"
                        + ") with (\n"
                        + " 'connector' = 'values',\n"
                        + " 'table-sink-class' = 'DEFAULT')";
        tEnv.executeSql(sinkTableDdl);
        util.addTemporarySystemFunction("func1", TableFunc1.class);
        String sqlQuery =
                "insert into MySink SELECT c, s FROM MyTable, LATERAL TABLE(func1(c, '$')) AS T(s)";
        util.verifyJsonPlan(sqlQuery);
    }

    // Fixed: single @Test annotation (was duplicated).
    @Test
    void testJoinWithFilter() {
        String sinkTableDdl =
                "CREATE TABLE MySink (\n"
                        + " a varchar,\n"
                        + " b varchar\n"
                        + ") with (\n"
                        + " 'connector' = 'values',\n"
                        + " 'table-sink-class' = 'DEFAULT')";
        tEnv.executeSql(sinkTableDdl);
        util.addTemporarySystemFunction("func1", TableFunc1.class);
        String sqlQuery =
                "insert into MySink "
                        + "select * from (SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)) as T2 where c = s";
        util.verifyJsonPlan(sqlQuery);
    }
}
Also, I just checked — sample_invoice.jpg is currently returning only these fields: ```java "Amount" "Description" "Quantity" "UnitPrice" ```
public static void main(final String[] args) throws IOException { FormRecognizerClient client = new FormRecognizerClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("https: .buildClient(); String invoiceUrl = "https: + "azure-ai-formrecognizer/samples/sample_forms/forms/sample_invoice.jpg"; SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> recognizeInvoicesPoller = client.beginRecognizeInvoicesFromUrl(invoiceUrl); List<RecognizedForm> recognizedInvoices = recognizeInvoicesPoller.getFinalResult(); for (int i = 0; i < recognizedInvoices.size(); i++) { RecognizedForm recognizedInvoice = recognizedInvoices.get(i); Map<String, FormField> recognizedFields = recognizedInvoice.getFields(); System.out.printf("----------- Recognized invoice info for page %d -----------%n", i); FormField vendorNameField = recognizedFields.get("VendorName"); if (vendorNameField != null) { if (FieldValueType.STRING == vendorNameField.getValue().getValueType()) { String merchantName = vendorNameField.getValue().asString(); System.out.printf("Vendor Name: %s, confidence: %.2f%n", merchantName, vendorNameField.getConfidence()); } } FormField vendorAddressField = recognizedFields.get("VendorAddress"); if (vendorAddressField != null) { if (FieldValueType.STRING == vendorAddressField.getValue().getValueType()) { String merchantAddress = vendorAddressField.getValue().asString(); System.out.printf("Vendor address: %s, confidence: %.2f%n", merchantAddress, vendorAddressField.getConfidence()); } } FormField customerNameField = recognizedFields.get("CustomerName"); if (customerNameField != null) { if (FieldValueType.STRING == customerNameField.getValue().getValueType()) { String merchantAddress = customerNameField.getValue().asString(); System.out.printf("Customer Name: %s, confidence: %.2f%n", merchantAddress, customerNameField.getConfidence()); } } FormField customerAddressRecipientField = recognizedFields.get("CustomerAddressRecipient"); if 
(customerAddressRecipientField != null) { if (FieldValueType.STRING == customerAddressRecipientField.getValue().getValueType()) { String customerAddr = customerAddressRecipientField.getValue().asString(); System.out.printf("Customer Address Recipient: %s, confidence: %.2f%n", customerAddr, customerAddressRecipientField.getConfidence()); } } FormField invoiceIdField = recognizedFields.get("InvoiceId"); if (invoiceIdField != null) { if (FieldValueType.STRING == invoiceIdField.getValue().getValueType()) { String invoiceId = invoiceIdField.getValue().asString(); System.out.printf("Invoice Id: %s, confidence: %.2f%n", invoiceId, invoiceIdField.getConfidence()); } } FormField invoiceDateField = recognizedFields.get("InvoiceDate"); if (customerNameField != null) { if (FieldValueType.DATE == invoiceDateField.getValue().getValueType()) { LocalDate invoiceDate = invoiceDateField.getValue().asDate(); System.out.printf("Invoice Date: %s, confidence: %.2f%n", invoiceDate, invoiceDateField.getConfidence()); } } FormField invoiceTotalField = recognizedFields.get("InvoiceTotal"); if (customerAddressRecipientField != null) { if (FieldValueType.FLOAT == invoiceTotalField.getValue().getValueType()) { Float invoiceTotal = invoiceTotalField.getValue().asFloat(); System.out.printf("Invoice Total: %.2f, confidence: %.2f%n", invoiceTotal, invoiceTotalField.getConfidence()); } } FormField invoiceItemsField = recognizedFields.get("Items"); if (invoiceItemsField != null) { System.out.printf("Invoice Items: %n"); if (FieldValueType.LIST == invoiceItemsField.getValue().getValueType()) { List<FormField> invoiceItems = invoiceItemsField.getValue().asList(); invoiceItems.stream() .filter(invoiceItem -> FieldValueType.MAP == invoiceItem.getValue().getValueType()) .map(formField -> formField.getValue().asMap()) .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> { if ("Description".equals(key)) { if (FieldValueType.STRING == formField.getValue().getValueType()) { String name = 
formField.getValue().asString(); System.out.printf("Description: %s, confidence: %.2fs%n", name, formField.getConfidence()); } } if ("Quantity".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float quantity = formField.getValue().asFloat(); System.out.printf("Quantity: %f, confidence: %.2f%n", quantity, formField.getConfidence()); } } if ("Unit".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float unit = formField.getValue().asFloat(); System.out.printf("Unit: %f, confidence: %.2f%n", unit, formField.getConfidence()); } } if ("UnitPrice".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float unitPrice = formField.getValue().asFloat(); System.out.printf("Unit Price: %f, confidence: %.2f%n", unitPrice, formField.getConfidence()); } } if ("ProductCode".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float productCode = formField.getValue().asFloat(); System.out.printf("Product Code: %f, confidence: %.2f%n", productCode, formField.getConfidence()); } } })); } } } }
Float unit = formField.getValue().asFloat();
public static void main(final String[] args) throws IOException { FormRecognizerClient client = new FormRecognizerClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("https: .buildClient(); String invoiceUrl = "https: + "azure-ai-formrecognizer/samples/sample_forms/forms/sample_invoice.jpg"; SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> recognizeInvoicesPoller = client.beginRecognizeInvoicesFromUrl(invoiceUrl); List<RecognizedForm> recognizedInvoices = recognizeInvoicesPoller.getFinalResult(); for (int i = 0; i < recognizedInvoices.size(); i++) { RecognizedForm recognizedInvoice = recognizedInvoices.get(i); Map<String, FormField> recognizedFields = recognizedInvoice.getFields(); System.out.printf("----------- Recognized invoice info for page %d -----------%n", i); FormField vendorNameField = recognizedFields.get("VendorName"); if (vendorNameField != null) { if (FieldValueType.STRING == vendorNameField.getValue().getValueType()) { String merchantName = vendorNameField.getValue().asString(); System.out.printf("Vendor Name: %s, confidence: %.2f%n", merchantName, vendorNameField.getConfidence()); } } FormField vendorAddressField = recognizedFields.get("VendorAddress"); if (vendorAddressField != null) { if (FieldValueType.STRING == vendorAddressField.getValue().getValueType()) { String merchantAddress = vendorAddressField.getValue().asString(); System.out.printf("Vendor address: %s, confidence: %.2f%n", merchantAddress, vendorAddressField.getConfidence()); } } FormField customerNameField = recognizedFields.get("CustomerName"); if (customerNameField != null) { if (FieldValueType.STRING == customerNameField.getValue().getValueType()) { String merchantAddress = customerNameField.getValue().asString(); System.out.printf("Customer Name: %s, confidence: %.2f%n", merchantAddress, customerNameField.getConfidence()); } } FormField customerAddressRecipientField = recognizedFields.get("CustomerAddressRecipient"); if 
(customerAddressRecipientField != null) { if (FieldValueType.STRING == customerAddressRecipientField.getValue().getValueType()) { String customerAddr = customerAddressRecipientField.getValue().asString(); System.out.printf("Customer Address Recipient: %s, confidence: %.2f%n", customerAddr, customerAddressRecipientField.getConfidence()); } } FormField invoiceIdField = recognizedFields.get("InvoiceId"); if (invoiceIdField != null) { if (FieldValueType.STRING == invoiceIdField.getValue().getValueType()) { String invoiceId = invoiceIdField.getValue().asString(); System.out.printf("Invoice Id: %s, confidence: %.2f%n", invoiceId, invoiceIdField.getConfidence()); } } FormField invoiceDateField = recognizedFields.get("InvoiceDate"); if (customerNameField != null) { if (FieldValueType.DATE == invoiceDateField.getValue().getValueType()) { LocalDate invoiceDate = invoiceDateField.getValue().asDate(); System.out.printf("Invoice Date: %s, confidence: %.2f%n", invoiceDate, invoiceDateField.getConfidence()); } } FormField invoiceTotalField = recognizedFields.get("InvoiceTotal"); if (customerAddressRecipientField != null) { if (FieldValueType.FLOAT == invoiceTotalField.getValue().getValueType()) { Float invoiceTotal = invoiceTotalField.getValue().asFloat(); System.out.printf("Invoice Total: %.2f, confidence: %.2f%n", invoiceTotal, invoiceTotalField.getConfidence()); } } FormField invoiceItemsField = recognizedFields.get("Items"); if (invoiceItemsField != null) { System.out.printf("Invoice Items: %n"); if (FieldValueType.LIST == invoiceItemsField.getValue().getValueType()) { List<FormField> invoiceItems = invoiceItemsField.getValue().asList(); invoiceItems.stream() .filter(invoiceItem -> FieldValueType.MAP == invoiceItem.getValue().getValueType()) .map(formField -> formField.getValue().asMap()) .forEach(formFieldMap -> formFieldMap.forEach((key, formField) -> { if ("Description".equals(key)) { if (FieldValueType.STRING == formField.getValue().getValueType()) { String name = 
formField.getValue().asString(); System.out.printf("Description: %s, confidence: %.2fs%n", name, formField.getConfidence()); } } if ("Quantity".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float quantity = formField.getValue().asFloat(); System.out.printf("Quantity: %f, confidence: %.2f%n", quantity, formField.getConfidence()); } } if ("UnitPrice".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float unitPrice = formField.getValue().asFloat(); System.out.printf("Unit Price: %f, confidence: %.2f%n", unitPrice, formField.getConfidence()); } } if ("ProductCode".equals(key)) { if (FieldValueType.FLOAT == formField.getValue().getValueType()) { Float productCode = formField.getValue().asFloat(); System.out.printf("Product Code: %f, confidence: %.2f%n", productCode, formField.getConfidence()); } } })); } } } }
// NOTE(review): class skeleton only — the Javadoc below documents a main(String[]) method
// whose body appears to have been stripped from this copy; restore main before compiling.
class RecognizeInvoicesFromUrl { /** * Main method to invoke this demo. * * @param args Unused. Arguments to the program. * @throws IOException Exception thrown when there is an error in reading all the bytes from the File. */ }
// NOTE(review): class skeleton only — the Javadoc below documents a main(String[]) method
// whose body appears to have been stripped from this copy; restore main before compiling.
class RecognizeInvoicesFromUrl { /** * Main method to invoke this demo. * * @param args Unused. Arguments to the program. * @throws IOException Exception thrown when there is an error in reading all the bytes from the File. */ }
Shouldn't we change the order here, as discussed offline? ``` .doOnError(throwable -> { this.refreshInProgress.set(null); logger.warn("Background refresh task failed", throwable); ```
/**
 * Builds the cached background-refresh Mono: derives a new value from the current one,
 * publishes it, and clears the in-progress marker on both success and failure.
 *
 * <p>Fixed: in {@code doOnError}, {@code refreshInProgress} is now cleared BEFORE logging
 * (per review), so a failed refresh never leaves a stale in-progress marker while the log
 * call runs. Also replaced the pass-through lambda with a method reference.
 *
 * @param createRefreshFunction computes the refreshed value from the current one
 * @return a cached Mono emitting the refreshed value
 */
private Mono<TValue> createBackgroundRefreshTask(Function<TValue, Mono<TValue>> createRefreshFunction) {
    return this.value
        .get()
        .flatMap(createRefreshFunction)
        .flatMap(response -> {
            // Clear the marker before publishing so a follow-up refresh can be scheduled.
            this.refreshInProgress.set(null);
            return this.value.updateAndGet(existingValue -> Mono.just(response));
        })
        .doOnError(throwable -> {
            // Clear first, then log — a failure must not block future refresh attempts.
            this.refreshInProgress.set(null);
            logger.warn("Background refresh task failed", throwable);
        })
        .cache();
}
this.refreshInProgress.set(null);
/**
 * Builds the cached background-refresh Mono: derives a new value from the current one,
 * publishes it, and always clears the in-progress marker — before logging on failure.
 *
 * @param createRefreshFunction computes the refreshed value from the current one
 * @return a cached Mono emitting the refreshed value
 */
private Mono<TValue> createBackgroundRefreshTask(Function<TValue, Mono<TValue>> createRefreshFunction) {
    return this.value
        .get()
        .flatMap(createRefreshFunction)
        .flatMap(refreshedValue -> {
            // Allow the next refresh to be scheduled before the new value is observed.
            this.refreshInProgress.set(null);
            return this.value.updateAndGet(ignored -> Mono.just(refreshedValue));
        })
        .doOnError(error -> {
            this.refreshInProgress.set(null);
            logger.warn("Background refresh task failed", error);
        })
        .cache();
}
// NOTE(review): concurrency-sensitive reactive cache — code kept byte-identical; comments only.
// - getOrCreateBackgroundRefreshTaskAsync: compareAndSet eagerly constructs the refresh Mono
//   even when it loses the race (the loser's Mono is discarded unsubscribed). Between the CAS
//   and the subsequent get(), a completing refresh may reset the reference to null — hence
//   the null re-check before falling back to this.value.get().
// - refresh: returns Mono.empty() when a refresh is already running, so callers cannot
//   distinguish "skipped" from an empty result — presumably intended; verify against callers.
// - @SuppressWarnings("unchecked") on getOrCreateBackgroundRefreshTaskAsync appears unused
//   (no cast in the method body) — TODO confirm it can be removed.
class AsyncLazyWithRefresh<TValue> { private final AtomicBoolean removeFromCache = new AtomicBoolean(false); private final AtomicReference<Mono<TValue>> value; private final AtomicReference<Mono<TValue>> refreshInProgress; public AsyncLazyWithRefresh(TValue value) { this.value = new AtomicReference<>(); this.value.set(Mono.just(value)); this.refreshInProgress = new AtomicReference<>(null); } public AsyncLazyWithRefresh(Function<TValue, Mono<TValue>> taskFactory) { this.value = new AtomicReference<>(); this.value.set(taskFactory.apply(null).cache()); this.refreshInProgress = new AtomicReference<>(null); } public Mono<TValue> getValueAsync() { return this.value.get(); } public Mono<TValue> value() { return value.get(); } @SuppressWarnings("unchecked") public Mono<TValue> getOrCreateBackgroundRefreshTaskAsync(Function<TValue, Mono<TValue>> createRefreshFunction) { if (this.refreshInProgress.compareAndSet(null, this.createBackgroundRefreshTask(createRefreshFunction))) { logger.debug("Started a new background task"); } else { logger.debug("Background refresh task is already in progress"); } Mono<TValue> refreshInProgressSnapshot = this.refreshInProgress.get(); return refreshInProgressSnapshot == null ? 
this.value.get() : refreshInProgressSnapshot; } /*** * If there is no refresh in progress background task, then create a new one, else skip * * @param createRefreshFunction the createRefreshFunction * @return if there is already a refreshInProgress task ongoing, then return Mono.empty, else return the newly created background refresh task */ public Mono<TValue> refresh(Function<TValue, Mono<TValue>> createRefreshFunction) { if (this.refreshInProgress.compareAndSet(null, this.createBackgroundRefreshTask(createRefreshFunction))) { logger.debug("Started a new background task"); return this.refreshInProgress.get(); } logger.debug("Background refresh task is already in progress, skip creating a new one"); return Mono.empty(); } public boolean shouldRemoveFromCache() { return this.removeFromCache.compareAndSet(false, true); } }
// NOTE(review): concurrency-sensitive reactive cache — code kept byte-identical; comments only.
// - getOrCreateBackgroundRefreshTaskAsync: updateAndGet makes the create-if-absent atomic,
//   but per AtomicReference.updateAndGet the update function may be re-applied under
//   contention, and this lambda has side effects (logging, createBackgroundRefreshTask) —
//   TODO confirm a re-executed lambda cannot leak an extra task.
// - refresh: returns null (not Mono.empty()) when a refresh is already in progress —
//   callers must null-check; confirm null is intended here.
class AsyncLazyWithRefresh<TValue> { private final AtomicBoolean removeFromCache = new AtomicBoolean(false); private final AtomicReference<Mono<TValue>> value; private final AtomicReference<Mono<TValue>> refreshInProgress; public AsyncLazyWithRefresh(TValue value) { this.value = new AtomicReference<>(); this.value.set(Mono.just(value)); this.refreshInProgress = new AtomicReference<>(null); } public AsyncLazyWithRefresh(Function<TValue, Mono<TValue>> taskFactory) { this.value = new AtomicReference<>(); this.value.set(taskFactory.apply(null).cache()); this.refreshInProgress = new AtomicReference<>(null); } public Mono<TValue> getValueAsync() { return this.value.get(); } public Mono<TValue> value() { return value.get(); } public Mono<TValue> getOrCreateBackgroundRefreshTaskAsync(Function<TValue, Mono<TValue>> createRefreshFunction) { Mono<TValue> refreshInProgressSnapshot = this.refreshInProgress.updateAndGet(existingMono -> { if (existingMono == null) { logger.debug("Started a new background task"); return this.createBackgroundRefreshTask(createRefreshFunction); } else { logger.debug("Background refresh task is already in progress"); } return existingMono; }); return refreshInProgressSnapshot == null ? 
this.value.get() : refreshInProgressSnapshot; } /*** * If there is no refresh in progress background task, then create a new one, else skip * * @param createRefreshFunction the createRefreshFunction * @return if there is already a refreshInProgress task ongoing, then return Mono.empty, else return the newly created background refresh task */ public Mono<TValue> refresh(Function<TValue, Mono<TValue>> createRefreshFunction) { if (this.refreshInProgress.compareAndSet(null, this.createBackgroundRefreshTask(createRefreshFunction))) { logger.debug("Started a new background task"); return this.refreshInProgress.get(); } logger.debug("Background refresh task is already in progress, skip creating a new one"); return null; } public boolean shouldRemoveFromCache() { return this.removeFromCache.compareAndSet(false, true); } }
The catalog name cannot be obtained here.
/**
 * Validates the statement: the database name must be non-empty and the current user needs
 * ALTER/CREATE/DROP privilege on it.
 *
 * <p>NOTE(review): the catalog is hard-coded to the internal catalog because no catalog
 * name is available on this statement.
 *
 * @throws AnalysisException if the database name is missing or access is denied
 */
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
    super.analyze(analyzer);
    if (Strings.isNullOrEmpty(db)) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_DB_NAME, db);
    }
    boolean hasPriv = Env.getCurrentEnv().getAccessManager().checkDbPriv(
            ConnectContext.get(), InternalCatalog.INTERNAL_CATALOG_NAME, db,
            PrivPredicate.ALTER_CREATE_DROP);
    if (!hasPriv) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR,
                ConnectContext.get().getQualifiedUser(), db);
    }
}
.checkDbPriv(ConnectContext.get(), InternalCatalog.INTERNAL_CATALOG_NAME, db,
/**
 * Validates the statement: the database name must be non-empty and the current user needs
 * ALTER/CREATE/DROP privilege on it.
 *
 * <p>NOTE(review): the catalog is hard-coded to the internal catalog because no catalog
 * name is available on this statement.
 *
 * @throws AnalysisException if the database name is missing or access is denied
 */
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
    super.analyze(analyzer);
    if (Strings.isNullOrEmpty(db)) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_DB_NAME, db);
    }
    boolean allowed = Env.getCurrentEnv().getAccessManager().checkDbPriv(
            ConnectContext.get(), InternalCatalog.INTERNAL_CATALOG_NAME, db,
            PrivPredicate.ALTER_CREATE_DROP);
    if (!allowed) {
        ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR,
                ConnectContext.get().getQualifiedUser(), db);
    }
}
class ShowCreateDbStmt extends ShowStmt { private static final ShowResultSetMetaData META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Database", ScalarType.createVarchar(20))) .addColumn(new Column("Create Database", ScalarType.createVarchar(30))) .build(); private String db; public ShowCreateDbStmt(String db) { this.db = db; } public String getDb() { return db; } @Override @Override public String toSql() { return "SHOW CREATE DATABASE `" + db + "`"; } @Override public String toString() { return toSql(); } @Override public ShowResultSetMetaData getMetaData() { return META_DATA; } }
class ShowCreateDbStmt extends ShowStmt { private static final ShowResultSetMetaData META_DATA = ShowResultSetMetaData.builder() .addColumn(new Column("Database", ScalarType.createVarchar(20))) .addColumn(new Column("Create Database", ScalarType.createVarchar(30))) .build(); private String db; public ShowCreateDbStmt(String db) { this.db = db; } public String getDb() { return db; } @Override @Override public String toSql() { return "SHOW CREATE DATABASE `" + db + "`"; } @Override public String toString() { return toSql(); } @Override public ShowResultSetMetaData getMetaData() { return META_DATA; } }
Also, please add a test case for this grant.
/**
 * Persists this AuthorizationMgr as a single metadata block (image V2 format).
 * Layout: self header, user-collection count, (user, collection) pairs,
 * role-collection count, (roleId, collection) pairs.
 *
 * <p>Fixed: the failure message previously named "AuthenticationManager", which is a
 * different class; it now names this class. Iterator-while loops replaced with
 * enhanced for loops (idiom; iteration order unchanged).
 *
 * @param dos stream backing the metadata image
 * @throws IOException if the underlying block writer fails
 */
public void saveV2(DataOutputStream dos) throws IOException {
    try {
        // 1 (this) + 1 (user count) + 2 per user entry + 1 (role count) + 2 per role entry.
        final int cnt = 1 + 1 + userToPrivilegeCollection.size() * 2
                + 1 + roleIdToPrivilegeCollection.size() * 2;
        SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.AUTHORIZATION_MGR, cnt);
        writer.writeJson(this);
        writer.writeJson(userToPrivilegeCollection.size());
        for (Map.Entry<UserIdentity, UserPrivilegeCollection> entry : userToPrivilegeCollection.entrySet()) {
            writer.writeJson(entry.getKey());
            writer.writeJson(entry.getValue());
        }
        writer.writeJson(roleIdToPrivilegeCollection.size());
        for (Map.Entry<Long, RolePrivilegeCollection> entry : roleIdToPrivilegeCollection.entrySet()) {
            writer.writeJson(entry.getKey());
            writer.writeJson(entry.getValue());
        }
        writer.close();
    } catch (SRMetaBlockException e) {
        throw new IOException("failed to save AuthorizationMgr!", e);
    }
}
writer.writeJson(this);
/**
 * Persists this AuthorizationMgr as a single metadata block (image V2 format).
 * Layout: self header, user-collection count, (user, collection) pairs,
 * role-collection count, (roleId, collection) pairs.
 *
 * <p>Fixed: the failure message previously named "AuthenticationManager", which is a
 * different class; it now names this class. Iterator-while loops replaced with
 * enhanced for loops (idiom; iteration order unchanged).
 *
 * @param dos stream backing the metadata image
 * @throws IOException if the underlying block writer fails
 */
public void saveV2(DataOutputStream dos) throws IOException {
    try {
        // 1 (this) + 1 (user count) + 2 per user entry + 1 (role count) + 2 per role entry.
        final int cnt = 1 + 1 + userToPrivilegeCollection.size() * 2
                + 1 + roleIdToPrivilegeCollection.size() * 2;
        SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.AUTHORIZATION_MGR, cnt);
        writer.writeJson(this);
        writer.writeJson(userToPrivilegeCollection.size());
        for (Map.Entry<UserIdentity, UserPrivilegeCollectionV2> entry : userToPrivilegeCollection.entrySet()) {
            writer.writeJson(entry.getKey());
            writer.writeJson(entry.getValue());
        }
        writer.writeJson(roleIdToPrivilegeCollection.size());
        for (Map.Entry<Long, RolePrivilegeCollectionV2> entry : roleIdToPrivilegeCollection.entrySet()) {
            writer.writeJson(entry.getKey());
            writer.writeJson(entry.getValue());
        }
        writer.close();
    } catch (SRMetaBlockException e) {
        throw new IOException("failed to save AuthorizationMgr!", e);
    }
}
class AuthorizationMgr { private static final Logger LOG = LogManager.getLogger(AuthorizationMgr.class); @SerializedName(value = "r") private final Map<String, Long> roleNameToId; @SerializedName(value = "i") private short pluginId; @SerializedName(value = "v") private short pluginVersion; protected AuthorizationProvider provider; private GlobalStateMgr globalStateMgr; protected Map<UserIdentity, UserPrivilegeCollection> userToPrivilegeCollection; private static final int MAX_NUM_CACHED_MERGED_PRIVILEGE_COLLECTION = 1000; private static final int CACHED_MERGED_PRIVILEGE_COLLECTION_EXPIRE_MIN = 60; protected LoadingCache<Pair<UserIdentity, Set<Long>>, PrivilegeCollection> ctxToMergedPrivilegeCollections = CacheBuilder.newBuilder() .maximumSize(MAX_NUM_CACHED_MERGED_PRIVILEGE_COLLECTION) .expireAfterAccess(CACHED_MERGED_PRIVILEGE_COLLECTION_EXPIRE_MIN, TimeUnit.MINUTES) .build(new CacheLoader<Pair<UserIdentity, Set<Long>>, PrivilegeCollection>() { @Override public PrivilegeCollection load(@NotNull Pair<UserIdentity, Set<Long>> userIdentitySetPair) throws Exception { return loadPrivilegeCollection(userIdentitySetPair.first, userIdentitySetPair.second); } }); private final ReentrantReadWriteLock userLock; private boolean isLoaded = false; protected Map<Long, RolePrivilegeCollection> roleIdToPrivilegeCollection; private final ReentrantReadWriteLock roleLock; protected AuthorizationMgr() { roleNameToId = new HashMap<>(); userToPrivilegeCollection = new HashMap<>(); roleIdToPrivilegeCollection = new HashMap<>(); userLock = new ReentrantReadWriteLock(); roleLock = new ReentrantReadWriteLock(); } public AuthorizationMgr(GlobalStateMgr globalStateMgr, AuthorizationProvider provider) { this.globalStateMgr = globalStateMgr; if (provider == null) { this.provider = new DefaultAuthorizationProvider(); } else { this.provider = provider; } pluginId = this.provider.getPluginId(); pluginVersion = this.provider.getPluginVersion(); roleNameToId = new HashMap<>(); userLock = new 
ReentrantReadWriteLock(); roleLock = new ReentrantReadWriteLock(); userToPrivilegeCollection = new HashMap<>(); roleIdToPrivilegeCollection = new HashMap<>(); initBuiltinRolesAndUsers(); } public void initBuiltinRolesAndUsers() { try { RolePrivilegeCollection rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.ROOT_ROLE_ID, PrivilegeBuiltinConstants.ROOT_ROLE_NAME, "built-in root role which has all privileges on all objects"); for (ObjectType objectType : provider.getAllPrivObjectTypes()) { initPrivilegeCollectionAllObjects(rolePrivilegeCollection, objectType, provider.getAvailablePrivType(objectType)); } rolePrivilegeCollection.disableMutable(); rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.DB_ADMIN_ROLE_ID, PrivilegeBuiltinConstants.DB_ADMIN_ROLE_NAME, "built-in database administration role"); List<PrivilegeType> actionWithoutNodeGrant = provider.getAvailablePrivType(ObjectType.SYSTEM).stream().filter( x -> !x.equals(PrivilegeType.GRANT) && !x.equals(PrivilegeType.NODE)).collect(Collectors.toList()); initPrivilegeCollections(rolePrivilegeCollection, ObjectType.SYSTEM, actionWithoutNodeGrant, null, false); for (ObjectType t : Arrays.asList( ObjectType.CATALOG, ObjectType.DATABASE, ObjectType.TABLE, ObjectType.VIEW, ObjectType.MATERIALIZED_VIEW, ObjectType.RESOURCE, ObjectType.RESOURCE_GROUP, ObjectType.FUNCTION, ObjectType.GLOBAL_FUNCTION, ObjectType.STORAGE_VOLUME)) { initPrivilegeCollectionAllObjects(rolePrivilegeCollection, t, provider.getAvailablePrivType(t)); } rolePrivilegeCollection.disableMutable(); rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.CLUSTER_ADMIN_ROLE_ID, PrivilegeBuiltinConstants.CLUSTER_ADMIN_ROLE_NAME, "built-in cluster administration role"); initPrivilegeCollections( rolePrivilegeCollection, ObjectType.SYSTEM, Collections.singletonList(PrivilegeType.NODE), null, false); rolePrivilegeCollection.disableMutable(); rolePrivilegeCollection = 
initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.USER_ADMIN_ROLE_ID, PrivilegeBuiltinConstants.USER_ADMIN_ROLE_NAME, "built-in user administration role"); initPrivilegeCollections( rolePrivilegeCollection, ObjectType.SYSTEM, Collections.singletonList(PrivilegeType.GRANT), null, false); ObjectType t = ObjectType.USER; initPrivilegeCollectionAllObjects(rolePrivilegeCollection, t, provider.getAvailablePrivType(t)); rolePrivilegeCollection.disableMutable(); rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.PUBLIC_ROLE_ID, PrivilegeBuiltinConstants.PUBLIC_ROLE_NAME, "built-in public role which is owned by any user"); List<PEntryObject> object = Collections.singletonList(new TablePEntryObject( Long.toString(SystemId.INFORMATION_SCHEMA_DB_ID), PrivilegeBuiltinConstants.ALL_TABLES_UUID)); rolePrivilegeCollection.grant(ObjectType.TABLE, Collections.singletonList(PrivilegeType.SELECT), object, false); UserPrivilegeCollection rootCollection = new UserPrivilegeCollection(); rootCollection.grantRole(PrivilegeBuiltinConstants.ROOT_ROLE_ID); rootCollection.setDefaultRoleIds(Sets.newHashSet(PrivilegeBuiltinConstants.ROOT_ROLE_ID)); userToPrivilegeCollection.put(UserIdentity.ROOT, rootCollection); } catch (PrivilegeException e) { throw new RuntimeException("Fatal error when initializing built-in role and user", e); } } private void initPrivilegeCollections(PrivilegeCollection collection, ObjectType objectType, List<PrivilegeType> actionList, List<String> tokens, boolean isGrant) throws PrivilegeException { List<PEntryObject> object; if (tokens != null) { object = Collections.singletonList(provider.generateObject(objectType, tokens, globalStateMgr)); } else { object = Arrays.asList(new PEntryObject[] {null}); } collection.grant(objectType, actionList, object, isGrant); } private void initPrivilegeCollectionAllObjects( PrivilegeCollection collection, ObjectType objectType, List<PrivilegeType> actionList) throws PrivilegeException { List<PEntryObject> 
objects = new ArrayList<>(); switch (objectType) { case TABLE: objects.add(provider.generateObject(objectType, Lists.newArrayList("*", "*", "*"), globalStateMgr)); collection.grant(objectType, actionList, objects, false); break; case VIEW: case MATERIALIZED_VIEW: case DATABASE: objects.add(provider.generateObject(objectType, Lists.newArrayList("*", "*"), globalStateMgr)); collection.grant(objectType, actionList, objects, false); break; case USER: objects.add(provider.generateUserObject(objectType, null, globalStateMgr)); collection.grant(objectType, actionList, objects, false); break; case RESOURCE: case CATALOG: case RESOURCE_GROUP: case STORAGE_VOLUME: objects.add(provider.generateObject(objectType, Lists.newArrayList("*"), globalStateMgr)); collection.grant(objectType, actionList, objects, false); break; case FUNCTION: objects.add(provider.generateFunctionObject(objectType, PrivilegeBuiltinConstants.ALL_DATABASE_ID, PrivilegeBuiltinConstants.ALL_FUNCTIONS_ID, globalStateMgr)); collection.grant(objectType, actionList, objects, false); break; case GLOBAL_FUNCTION: objects.add(provider.generateFunctionObject(objectType, PrivilegeBuiltinConstants.GLOBAL_FUNCTION_DEFAULT_DATABASE_ID, PrivilegeBuiltinConstants.ALL_FUNCTIONS_ID, globalStateMgr)); collection.grant(objectType, actionList, objects, false); break; case SYSTEM: collection.grant(objectType, actionList, Arrays.asList(new PEntryObject[] {null}), false); break; default: throw new PrivilegeException("unsupported type " + objectType); } } private RolePrivilegeCollection initBuiltinRoleUnlocked(long roleId, String name) { return initBuiltinRoleUnlocked(roleId, name, null); } private RolePrivilegeCollection initBuiltinRoleUnlocked(long roleId, String name, String comment) { RolePrivilegeCollection collection = new RolePrivilegeCollection( name, comment, RolePrivilegeCollection.RoleFlags.MUTABLE); roleIdToPrivilegeCollection.put(roleId, collection); roleNameToId.put(name, roleId); LOG.info("create built-in role 
{}[{}]", name, roleId); return collection; } private void userReadLock() { userLock.readLock().lock(); } private void userReadUnlock() { userLock.readLock().unlock(); } private void userWriteLock() { userLock.writeLock().lock(); } private void userWriteUnlock() { userLock.writeLock().unlock(); } private void roleReadLock() { roleLock.readLock().lock(); } private void roleReadUnlock() { roleLock.readLock().unlock(); } private void roleWriteLock() { roleLock.writeLock().lock(); } private void roleWriteUnlock() { roleLock.writeLock().unlock(); } public void grant(GrantPrivilegeStmt stmt) throws DdlException { try { if (stmt.getRole() != null) { grantToRole( stmt.getObjectType(), stmt.getPrivilegeTypes(), stmt.getObjectList(), stmt.isWithGrantOption(), stmt.getRole()); } else { grantToUser( stmt.getObjectType(), stmt.getPrivilegeTypes(), stmt.getObjectList(), stmt.isWithGrantOption(), stmt.getUserIdentity()); } } catch (PrivilegeException e) { throw new DdlException("failed to grant: " + e.getMessage(), e); } } protected void grantToUser( ObjectType type, List<PrivilegeType> privilegeTypes, List<PEntryObject> objects, boolean isGrant, UserIdentity userIdentity) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollection collection = getUserPrivilegeCollectionUnlocked(userIdentity); collection.grant(type, privilegeTypes, objects, isGrant); globalStateMgr.getEditLog().logUpdateUserPrivilege( userIdentity, collection, provider.getPluginId(), provider.getPluginVersion()); invalidateUserInCache(userIdentity); } finally { userWriteUnlock(); } } protected void grantToRole( ObjectType objectType, List<PrivilegeType> privilegeTypes, List<PEntryObject> objects, boolean isGrant, String roleName) throws PrivilegeException { roleWriteLock(); try { long roleId = getRoleIdByNameNoLock(roleName); invalidateRolesInCacheRoleUnlocked(roleId); RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, true); collection.grant(objectType, privilegeTypes, 
objects, isGrant); Map<Long, RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); rolePrivCollectionModified.put(roleId, collection); globalStateMgr.getEditLog().logUpdateRolePrivilege( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); } finally { roleWriteUnlock(); } } public void revoke(RevokePrivilegeStmt stmt) throws DdlException { try { if (stmt.getRole() != null) { revokeFromRole( stmt.getObjectType(), stmt.getPrivilegeTypes(), stmt.getObjectList(), stmt.getRole()); } else { revokeFromUser( stmt.getObjectType(), stmt.getPrivilegeTypes(), stmt.getObjectList(), stmt.getUserIdentity()); } } catch (PrivilegeException e) { throw new DdlException(e.getMessage()); } } protected void revokeFromUser( ObjectType objectType, List<PrivilegeType> privilegeTypes, List<PEntryObject> objects, UserIdentity userIdentity) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollection collection = getUserPrivilegeCollectionUnlocked(userIdentity); collection.revoke(objectType, privilegeTypes, objects); globalStateMgr.getEditLog().logUpdateUserPrivilege( userIdentity, collection, provider.getPluginId(), provider.getPluginVersion()); invalidateUserInCache(userIdentity); } finally { userWriteUnlock(); } } protected void revokeFromRole( ObjectType objectType, List<PrivilegeType> privilegeTypes, List<PEntryObject> objects, String roleName) throws PrivilegeException { roleWriteLock(); try { long roleId = getRoleIdByNameNoLock(roleName); RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, true); collection.revoke(objectType, privilegeTypes, objects); invalidateRolesInCacheRoleUnlocked(roleId); Map<Long, RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); rolePrivCollectionModified.put(roleId, collection); globalStateMgr.getEditLog().logUpdateRolePrivilege( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); } finally { roleWriteUnlock(); } } public 
// Entry point for GRANT <role> TO { USER | ROLE }; delegates per grantee kind.
void grantRole(GrantRoleStmt stmt) throws DdlException { try { if (stmt.getUserIdentity() != null) { grantRoleToUser(stmt.getGranteeRole(), stmt.getUserIdentity()); } else { grantRoleToRole(stmt.getGranteeRole(), stmt.getRole()); } } catch (PrivilegeException e) { throw new DdlException("failed to grant role: " + e.getMessage(), e); } }
// Grants parent roles to a user under the user write lock. Each grant is
// speculatively applied, then verified against privilege_max_total_roles_per_user;
// the finally block rolls the grant back if verification did not complete.
protected void grantRoleToUser(List<String> parentRoleName, UserIdentity user) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollection userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(user); roleReadLock(); try { for (String parentRole : parentRoleName) { long roleId = getRoleIdByNameNoLock(parentRole); if (roleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) { throw new PrivilegeException("Granting role PUBLIC has no effect. " + "Every user and role has role PUBLIC implicitly granted."); } userPrivilegeCollection.grantRole(roleId); boolean verifyDone = false; try { Set<Long> result = getAllPredecessorsUnlocked(userPrivilegeCollection); if (result.size() > Config.privilege_max_total_roles_per_user) { LOG.warn("too many predecessor roles {} for user {}", result, user); throw new PrivilegeException(String.format( "%s has total %d predecessor roles > %d!", user, result.size(), Config.privilege_max_total_roles_per_user)); } verifyDone = true; } finally { if (!verifyDone) { userPrivilegeCollection.revokeRole(roleId); } } } } finally { roleReadUnlock(); } globalStateMgr.getEditLog().logUpdateUserPrivilege( user, userPrivilegeCollection, provider.getPluginId(), provider.getPluginVersion()); invalidateUserInCache(user); LOG.info("grant role {} to user {}", Joiner.on(", ").join(parentRoleName), user); } finally { userWriteUnlock(); } }
// Grants parent roles to a role: rejects cycles (grantee must not already be a
// predecessor of the parent) and enforces privilege_max_role_depth, rolling back
// the speculative addSubRole when verification fails.
protected void grantRoleToRole(List<String> parentRoleName, String roleName) throws PrivilegeException { roleWriteLock(); try { long roleId = getRoleIdByNameNoLock(roleName); RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, true); Map<Long,
RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); rolePrivCollectionModified.put(roleId, collection); for (String parentRole : parentRoleName) { long parentRoleId = getRoleIdByNameNoLock(parentRole); if (parentRoleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) { throw new PrivilegeException("Granting role PUBLIC has no effect. " + "Every user and role has role PUBLIC implicitly granted."); } RolePrivilegeCollection parentCollection = getRolePrivilegeCollectionUnlocked(parentRoleId, true); Set<Long> parentRolePredecessors = getAllPredecessorsUnlocked(parentRoleId); if (parentRolePredecessors.contains(roleId)) { throw new PrivilegeException(String.format("role %s[%d] is already a predecessor role of %s[%d]", roleName, roleId, parentRole, parentRoleId)); } boolean verifyDone = false; parentCollection.addSubRole(roleId); try { parentRolePredecessors = getAllPredecessorsUnlocked(parentRoleId); parentRolePredecessors.add(parentRoleId); for (long i : parentRolePredecessors) { long cnt = getMaxRoleInheritanceDepthInner(0, i); if (cnt > Config.privilege_max_role_depth) { String name = getRolePrivilegeCollectionUnlocked(i, true).getName(); throw new PrivilegeException(String.format( "role inheritance depth for %s[%d] is %d > %d", name, i, cnt, Config.privilege_max_role_depth)); } } verifyDone = true; } finally { if (!verifyDone) { parentCollection.removeSubRole(roleId); } } collection.addParentRole(parentRoleId); rolePrivCollectionModified.put(parentRoleId, parentCollection); } invalidateRolesInCacheRoleUnlocked(roleId); RolePrivilegeCollectionInfo info = new RolePrivilegeCollectionInfo( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); globalStateMgr.getEditLog().logUpdateRolePrivilege(info); LOG.info("grant role {}[{}] to role {}[{}]", parentRoleName, Joiner.on(", ").join(parentRoleName), roleName, roleId); } finally { roleWriteUnlock(); } } public void revokeRole(RevokeRoleStmt stmt) throws DdlException { try { if
(stmt.getUserIdentity() != null) { revokeRoleFromUser(stmt.getGranteeRole(), stmt.getUserIdentity()); } else { revokeRoleFromRole(stmt.getGranteeRole(), stmt.getRole()); } } catch (PrivilegeException e) { throw new DdlException("failed to revoke role: " + e.getMessage(), e); } }
// Revokes roles from a user under the user write lock; PUBLIC cannot be revoked.
protected void revokeRoleFromUser(List<String> roleNameList, UserIdentity user) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollection collection = getUserPrivilegeCollectionUnlocked(user); roleReadLock(); try { for (String roleName : roleNameList) { long roleId = getRoleIdByNameNoLock(roleName); if (roleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) { throw new PrivilegeException("Revoking role PUBLIC has no effect. " + "Every user and role has role PUBLIC implicitly granted."); } collection.revokeRole(roleId); } } finally { roleReadUnlock(); } globalStateMgr.getEditLog().logUpdateUserPrivilege( user, collection, provider.getPluginId(), provider.getPluginVersion()); invalidateUserInCache(user); LOG.info("revoke role {} from user {}", roleNameList.toString(), user); } finally { userWriteUnlock(); } }
// Removes the parent/sub links in both directions, then persists every touched
// collection (grantee plus all parents) in a single edit-log entry.
protected void revokeRoleFromRole(List<String> parentRoleNameList, String roleName) throws PrivilegeException { roleWriteLock(); try { long roleId = getRoleIdByNameNoLock(roleName); RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, true); for (String parentRoleName : parentRoleNameList) { long parentRoleId = getRoleIdByNameNoLock(parentRoleName); if (parentRoleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) { throw new PrivilegeException("Revoking role PUBLIC has no effect. " + "Every user and role has role PUBLIC implicitly granted."); } RolePrivilegeCollection parentCollection = getRolePrivilegeCollectionUnlocked(parentRoleId, true); parentCollection.removeSubRole(roleId); collection.removeParentRole(parentRoleId); } Map<Long, RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); rolePrivCollectionModified.put(roleId, collection); List<Long> parentRoleIdList = new ArrayList<>(); for (String parentRoleName : parentRoleNameList) { long parentRoleId = getRoleIdByNameNoLock(parentRoleName); RolePrivilegeCollection parentCollection = getRolePrivilegeCollectionUnlocked(parentRoleId, true); parentRoleIdList.add(parentRoleId); rolePrivCollectionModified.put(parentRoleId, parentCollection); } RolePrivilegeCollectionInfo info = new RolePrivilegeCollectionInfo( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); globalStateMgr.getEditLog().logUpdateRolePrivilege(info); invalidateRolesInCacheRoleUnlocked(roleId); LOG.info("revoke role {}[{}] from role {}[{}]", parentRoleNameList.toString(), parentRoleIdList.toString(), roleName, roleId); } finally { roleWriteUnlock(); } }
// Delegates grant-statement validation to the pluggable authorization provider.
public void validateGrant(ObjectType objectType, List<PrivilegeType> privilegeTypes, List<PEntryObject> objects) throws PrivilegeException { provider.validateGrant(objectType, privilegeTypes, objects); }
// Static helper: role ids directly granted to the user (user read lock held).
public static Set<Long> getOwnedRolesByUser(UserIdentity userIdentity) throws PrivilegeException { AuthorizationMgr manager = GlobalStateMgr.getCurrentState().getAuthorizationMgr(); try { manager.userReadLock(); UserPrivilegeCollection userCollection = manager.getUserPrivilegeCollectionUnlocked(userIdentity); return userCollection.getAllRoles(); } finally { manager.userReadUnlock(); } }
// Core privilege check; a null objectNames list checks against a null object.
protected boolean checkAction( PrivilegeCollection collection, ObjectType objectType, PrivilegeType privilegeType, List<String> objectNames) throws PrivilegeException { if (objectNames == null) { return provider.check(objectType, privilegeType,
null, collection); } else { PEntryObject object = provider.generateObject( objectType, objectNames, globalStateMgr); return provider.check(objectType, privilegeType, object, collection); } }
// True if the current user may impersonate 'impersonateUser'; privilege errors
// are logged and treated as "no".
public boolean canExecuteAs(ConnectContext context, UserIdentity impersonateUser) { try { PrivilegeCollection collection = mergePrivilegeCollection(context.getCurrentUserIdentity(), context.getCurrentRoleIds()); PEntryObject object = provider.generateUserObject(ObjectType.USER, impersonateUser, globalStateMgr); return provider.check(ObjectType.USER, PrivilegeType.IMPERSONATE, object, collection); } catch (PrivilegeException e) { LOG.warn("caught exception in canExecuteAs() user[{}]", impersonateUser, e); return false; } }
// True if the context may grant 'wants' on 'objects': either system-level GRANT
// or the provider's per-object grant-option check passes.
public boolean allowGrant(ConnectContext context, ObjectType type, List<PrivilegeType> wants, List<PEntryObject> objects) { try { PrivilegeCollection collection = mergePrivilegeCollection(context.getCurrentUserIdentity(), context.getCurrentRoleIds()); return checkAction(collection, ObjectType.SYSTEM, PrivilegeType.GRANT, null) || provider.allowGrant(type, wants, objects, collection); } catch (PrivilegeException e) { LOG.warn("caught exception when allowGrant", e); return false; } }
// Replay path: install a user collection from the journal and drop stale cache entries.
public void replayUpdateUserPrivilegeCollection( UserIdentity user, UserPrivilegeCollection privilegeCollection, short pluginId, short pluginVersion) throws PrivilegeException { userWriteLock(); try { provider.upgradePrivilegeCollection(privilegeCollection, pluginId, pluginVersion); userToPrivilegeCollection.put(user, privilegeCollection); invalidateUserInCache(user); LOG.info("replayed update user {}", user); } finally { userWriteUnlock(); } } /** * init all builtin privilege when a user is created, called by AuthenticationManager */ public UserPrivilegeCollection onCreateUser(UserIdentity user, List<String> defaultRoleName) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollection privilegeCollection = new UserPrivilegeCollection(); if (!defaultRoleName.isEmpty())
{ Set<Long> roleIds = new HashSet<>(); for (String role : defaultRoleName) { Long roleId = getRoleIdByNameNoLock(role); privilegeCollection.grantRole(roleId); roleIds.add(roleId); } privilegeCollection.setDefaultRoleIds(roleIds); } userToPrivilegeCollection.put(user, privilegeCollection); LOG.info("user privilege for {} is created, role {} is granted", user, PrivilegeBuiltinConstants.PUBLIC_ROLE_NAME); return privilegeCollection; } finally { userWriteUnlock(); } } /** * drop user privilege collection when a user is dropped, called by AuthenticationManager */ public void onDropUser(UserIdentity user) { userWriteLock(); try { userToPrivilegeCollection.remove(user); invalidateUserInCache(user); } finally { userWriteUnlock(); } } public short getProviderPluginId() { return provider.getPluginId(); } public short getProviderPluginVersion() { return provider.getPluginVersion(); } /** * read from cache */ protected PrivilegeCollection mergePrivilegeCollection(UserIdentity userIdentity, Set<Long> roleIds) throws PrivilegeException { try { return ctxToMergedPrivilegeCollections.get(new Pair<>(userIdentity, roleIds)); } catch (ExecutionException e) { String errMsg = String.format("failed merge privilege collection on %s with roles %s", userIdentity, roleIds); PrivilegeException exception = new PrivilegeException(errMsg); exception.initCause(e); throw exception; } } /** * used for cache to do the actual merge job */ protected PrivilegeCollection loadPrivilegeCollection(UserIdentity userIdentity, Set<Long> roleIdsSpecified) throws PrivilegeException { PrivilegeCollection collection = new PrivilegeCollection(); try { userReadLock(); Set<Long> validRoleIds; if (userIdentity.isEphemeral()) { Preconditions.checkState(roleIdsSpecified != null, "ephemeral use should always have current role ids specified"); validRoleIds = roleIdsSpecified; } else { UserPrivilegeCollection userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity);
// merge the user's direct privileges first, then (below, under the role read
// lock) fold in every transitively-reachable role plus the implicit PUBLIC role
collection.merge(userPrivilegeCollection); validRoleIds = new HashSet<>(userPrivilegeCollection.getAllRoles()); if (roleIdsSpecified != null) { validRoleIds.retainAll(roleIdsSpecified); } } try { roleReadLock(); validRoleIds = getAllPredecessorsUnlocked(validRoleIds); for (long roleId : validRoleIds) { RolePrivilegeCollection rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, false); if (rolePrivilegeCollection != null) { collection.merge(rolePrivilegeCollection); } } RolePrivilegeCollection rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(PrivilegeBuiltinConstants.PUBLIC_ROLE_ID, false); if (rolePrivilegeCollection != null) { collection.merge(rolePrivilegeCollection); } } finally { roleReadUnlock(); } } finally { userReadUnlock(); } return collection; } /** * if the privileges of a role are changed, call this function to invalidate cache * requires role lock */ protected void invalidateRolesInCacheRoleUnlocked(long roleId) throws PrivilegeException { Set<Long> badRoles = getAllDescendantsUnlocked(roleId); List<Pair<UserIdentity, Set<Long>>> badKeys = new ArrayList<>(); for (Pair<UserIdentity, Set<Long>> pair : ctxToMergedPrivilegeCollections.asMap().keySet()) { Set<Long> roleIds = pair.second; if (roleIds == null) { roleIds = getRoleIdsByUser(pair.first); } for (long badRoleId : badRoles) { if (roleIds.contains(badRoleId)) { badKeys.add(pair); break; } } } for (Pair<UserIdentity, Set<Long>> pair : badKeys) { ctxToMergedPrivilegeCollections.invalidate(pair); } } /** * if the privileges of a user are changed, call this function to invalidate cache * require not extra lock.
*/ protected void invalidateUserInCache(UserIdentity userIdentity) { List<Pair<UserIdentity, Set<Long>>> badKeys = new ArrayList<>(); for (Pair<UserIdentity, Set<Long>> pair : ctxToMergedPrivilegeCollections.asMap().keySet()) { if (pair.first.equals(userIdentity)) { badKeys.add(pair); } } for (Pair<UserIdentity, Set<Long>> pair : badKeys) { ctxToMergedPrivilegeCollections.invalidate(pair); } } public UserPrivilegeCollection getUserPrivilegeCollection(UserIdentity userIdentity) { userReadLock(); try { return userToPrivilegeCollection.get(userIdentity); } finally { userReadUnlock(); } } public UserPrivilegeCollection getUserPrivilegeCollectionUnlocked(UserIdentity userIdentity) throws PrivilegeException { UserPrivilegeCollection userCollection = userToPrivilegeCollection.get(userIdentity); if (userCollection == null) { throw new PrivilegeException("cannot find user " + (userIdentity == null ? "null" : userIdentity.toString())); } return userCollection; } public List<String> getAllUsers() { userReadLock(); try { List<String> users = Lists.newArrayList(); Set<UserIdentity> userIdentities = userToPrivilegeCollection.keySet(); for (UserIdentity userIdentity : userIdentities) { users.add(userIdentity.toString()); } return users; } finally { userReadUnlock(); } }
// NOTE(review): the local 'users' list below is unused and can be removed; the
// method also returns the live keySet view of the map, so callers can observe
// later mutations — consider returning a copy.
public Set<UserIdentity> getAllUserIdentities() { userReadLock(); try { List<String> users = Lists.newArrayList(); Set<UserIdentity> userIdentities = userToPrivilegeCollection.keySet(); return userIdentities; } finally { userReadUnlock(); } } protected UserPrivilegeCollection getUserPrivilegeCollectionUnlockedAllowNull(UserIdentity userIdentity) { return userToPrivilegeCollection.get(userIdentity); } public RolePrivilegeCollection getRolePrivilegeCollection(String roleName) { roleReadLock(); try { Long roleId = roleNameToId.get(roleName); if (roleId == null) { return null; } return roleIdToPrivilegeCollection.get(roleId); } finally { roleReadUnlock(); } } public RolePrivilegeCollection
// Overload: look up a role's privilege collection by id under the role read
// lock; returns null when the role no longer exists.
getRolePrivilegeCollection(long roleId) { roleReadLock(); try { return roleIdToPrivilegeCollection.get(roleId); } finally { roleReadUnlock(); } }
// NOTE(review): the exception message below lacks a space — "cannot find role" + roleId
// renders as "cannot find role<id>".
public RolePrivilegeCollection getRolePrivilegeCollectionUnlocked(long roleId, boolean exceptionIfNotExists) throws PrivilegeException { RolePrivilegeCollection collection = roleIdToPrivilegeCollection.get(roleId); if (collection == null) { if (exceptionIfNotExists) { throw new PrivilegeException("cannot find role" + roleId); } else { return null; } } return collection; }
// Builds the SHOW GRANTS row describing which parent roles were granted to
// 'roleName' (null when the role has no parents).
public List<String> getGranteeRoleDetailsForRole(String roleName) { roleReadLock(); try { Long roleId = getRoleIdByNameAllowNull(roleName); if (roleId == null) { throw new SemanticException("cannot find role " + roleName); } RolePrivilegeCollection rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, true); List<String> parentRoleNameList = new ArrayList<>(); for (Long parentRoleId : rolePrivilegeCollection.getParentRoleIds()) { RolePrivilegeCollection parentRolePriv = getRolePrivilegeCollectionUnlocked(parentRoleId, false); if (parentRolePriv != null) { parentRoleNameList.add(parentRolePriv.getName()); } } if (!parentRoleNameList.isEmpty()) { return Lists.newArrayList(roleName, null, AstToSQLBuilder.toSQL(new GrantRoleStmt(parentRoleNameList, roleName))); } return null; } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { roleReadUnlock(); } } public Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> getTypeToPrivilegeEntryListByRole(String roleName) { roleReadLock(); try { Long roleId = getRoleIdByNameAllowNull(roleName); if (roleId == null) { throw new SemanticException("cannot find role " + roleName); } RolePrivilegeCollection rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, true); return rolePrivilegeCollection.getTypeToPrivilegeEntryList(); } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { roleReadUnlock(); } } public List<String>
// Builds the SHOW GRANTS row describing the roles granted to 'userIdentity'.
getGranteeRoleDetailsForUser(UserIdentity userIdentity) { userReadLock(); try { UserPrivilegeCollection userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity); Set<Long> allRoles = userPrivilegeCollection.getAllRoles(); roleReadLock(); try { List<String> parentRoleNameList = new ArrayList<>(); for (Long roleId : allRoles) { RolePrivilegeCollection parentRolePriv = getRolePrivilegeCollectionUnlocked(roleId, false); if (parentRolePriv != null) { parentRoleNameList.add(parentRolePriv.getName()); } } if (!parentRoleNameList.isEmpty()) { return Lists.newArrayList(userIdentity.toString(), null, AstToSQLBuilder.toSQL(new GrantRoleStmt(parentRoleNameList, userIdentity))); } return null; } finally { roleReadUnlock(); } } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { userReadUnlock(); } } public Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> getTypeToPrivilegeEntryListByUser( UserIdentity userIdentity) { userReadLock(); try { UserPrivilegeCollection userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity); return userPrivilegeCollection.getTypeToPrivilegeEntryList(); } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { userReadUnlock(); } } public Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> getMergedTypeToPrivilegeEntryListByUser( UserIdentity userIdentity) { userReadLock(); try { UserPrivilegeCollection userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity); PrivilegeCollection collection = mergePrivilegeCollection(userIdentity, userPrivilegeCollection.getAllRoles()); return collection.getTypeToPrivilegeEntryList(); } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { userReadUnlock(); } } public List<String> getAllRoles() { roleReadLock(); try { List<String> roles = new ArrayList<>(); for (RolePrivilegeCollection rolePrivilegeCollection : roleIdToPrivilegeCollection.values()) {
// collect the names of every known role under the read lock
roles.add(rolePrivilegeCollection.getName()); } return roles; } finally { roleReadUnlock(); } }
// Expands an ActionSet into the concrete PrivilegeTypes it contains for the object type.
public List<PrivilegeType> analyzeActionSet(ObjectType objectType, ActionSet actionSet) { List<PrivilegeType> privilegeTypes = provider.getAvailablePrivType(objectType); List<PrivilegeType> actions = new ArrayList<>(); for (PrivilegeType actionName : privilegeTypes) { if (actionSet.contains(actionName)) { actions.add(actionName); } } return actions; } public boolean isAvailablePrivType(ObjectType objectType, PrivilegeType privilegeType) { return provider.isAvailablePrivType(objectType, privilegeType); } public List<PrivilegeType> getAvailablePrivType(ObjectType objectType) { return provider.getAvailablePrivType(objectType); }
// Creates roles; silently returns (with a log message) if any requested role
// already exists, so nothing is created in that case.
public void createRole(CreateRoleStmt stmt) { roleWriteLock(); try { Map<String, Long> roleNameToBeCreated = new HashMap<>(); Map<Long, RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); for (String roleName : stmt.getRoles()) { if (roleNameToId.containsKey(roleName)) { LOG.info("Operation CREATE ROLE failed for " + roleName + " : role " + roleName + " already exists"); return; } long roleId = globalStateMgr.getNextId(); RolePrivilegeCollection collection = new RolePrivilegeCollection( roleName, stmt.getComment(), RolePrivilegeCollection.RoleFlags.REMOVABLE, RolePrivilegeCollection.RoleFlags.MUTABLE); rolePrivCollectionModified.put(roleId, collection); roleNameToBeCreated.put(roleName, roleId); } roleIdToPrivilegeCollection.putAll(rolePrivCollectionModified); roleNameToId.putAll(roleNameToBeCreated); globalStateMgr.getEditLog().logUpdateRolePrivilege( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); LOG.info("created role {}[{}]", stmt.getRoles().toString(), roleNameToBeCreated.values()); } finally { roleWriteUnlock(); } }
// Alters role comments; validates that every role exists and is mutable before
// mutating any of them.
public void alterRole(AlterRoleStmt stmt) throws DdlException { try { roleWriteLock(); Map<Long, RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); for
(String roleName : stmt.getRoles()) { if (!roleNameToId.containsKey(roleName)) { throw new DdlException(roleName + " doesn't exist"); } long roleId = roleNameToId.get(roleName); RolePrivilegeCollection rolePrivilegeCollection = roleIdToPrivilegeCollection.get(roleId); Preconditions.checkNotNull(rolePrivilegeCollection); if (!rolePrivilegeCollection.isMutable()) { throw new DdlException(roleName + " is immutable"); } rolePrivCollectionModified.put(roleId, rolePrivilegeCollection); } rolePrivCollectionModified.values().forEach( rolePrivilegeCollection -> rolePrivilegeCollection.setComment(stmt.getComment())); globalStateMgr.getEditLog().logUpdateRolePrivilege( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); } finally { roleWriteUnlock(); } }
// Replay path for role-privilege updates from the journal.
public void replayUpdateRolePrivilegeCollection( RolePrivilegeCollectionInfo info) throws PrivilegeException { roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollection> entry : info.getRolePrivCollectionModified().entrySet()) { long roleId = entry.getKey(); invalidateRolesInCacheRoleUnlocked(roleId); RolePrivilegeCollection privilegeCollection = entry.getValue(); provider.upgradePrivilegeCollection(privilegeCollection, info.getPluginId(), info.getPluginVersion()); roleIdToPrivilegeCollection.put(roleId, privilegeCollection); if (!roleNameToId.containsKey(privilegeCollection.getName())) { roleNameToId.put(privilegeCollection.getName(), roleId); } LOG.info("replayed update role {}", roleId); } } finally { roleWriteUnlock(); } }
// Drops roles; returns early (with a log message) if any requested role does not exist.
public void dropRole(DropRoleStmt stmt) throws DdlException { roleWriteLock(); try { List<String> roleNameToBeDropped = new ArrayList<>(); Map<Long, RolePrivilegeCollection> rolePrivCollectionModified = new HashMap<>(); for (String roleName : stmt.getRoles()) { if (!roleNameToId.containsKey(roleName)) { LOG.info("Operation DROP ROLE failed for " + roleName + " : role " + roleName + " not exists"); return; } long roleId = getRoleIdByNameNoLock(roleName);
// built-in (non-removable) roles cannot be dropped
RolePrivilegeCollection collection = roleIdToPrivilegeCollection.get(roleId); if (!collection.isRemovable()) { throw new DdlException("role " + roleName + " cannot be dropped!"); } roleNameToBeDropped.add(roleName); rolePrivCollectionModified.put(roleId, collection); invalidateRolesInCacheRoleUnlocked(roleId); } roleIdToPrivilegeCollection.keySet().removeAll(rolePrivCollectionModified.keySet()); roleNameToBeDropped.forEach(roleNameToId.keySet()::remove); globalStateMgr.getEditLog().logDropRole( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); LOG.info("dropped role {}[{}]", stmt.getRoles().toString(), rolePrivCollectionModified.keySet().toString()); } catch (PrivilegeException e) { throw new DdlException("failed to drop role: " + e.getMessage(), e); } finally { roleWriteUnlock(); } }
// Replay path for DROP ROLE from the journal.
public void replayDropRole( RolePrivilegeCollectionInfo info) throws PrivilegeException { roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollection> entry : info.getRolePrivCollectionModified().entrySet()) { long roleId = entry.getKey(); invalidateRolesInCacheRoleUnlocked(roleId); RolePrivilegeCollection privilegeCollection = entry.getValue(); provider.upgradePrivilegeCollection(privilegeCollection, info.getPluginId(), info.getPluginVersion()); roleIdToPrivilegeCollection.remove(roleId); roleNameToId.remove(privilegeCollection.getName()); LOG.info("replayed drop role {}", roleId); } } finally { roleWriteUnlock(); } } public boolean checkRoleExists(String name) { roleReadLock(); try { return roleNameToId.containsKey(name); } finally { roleReadUnlock(); } } public boolean isBuiltinRole(String name) { return PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_NAMES.contains(name); }
// Returns the role's comment, or FeConstants.NULL_STRING when absent or empty.
public String getRoleComment(String name) { try { roleReadLock(); String result = FeConstants.NULL_STRING; Long roleId = roleNameToId.get(name); if (roleId != null) { String comment = roleIdToPrivilegeCollection.get(roleId).getComment(); if
(!Strings.isNullOrEmpty(comment)) { result = comment; } } return result; } finally { roleReadUnlock(); } }
// Role ids granted to the user, filtered to roles that still exist.
public Set<Long> getRoleIdsByUser(UserIdentity user) throws PrivilegeException { userReadLock(); try { Set<Long> ret = new HashSet<>(); roleReadLock(); try { for (long roleId : getUserPrivilegeCollectionUnlocked(user).getAllRoles()) { if (getRolePrivilegeCollectionUnlocked(roleId, false) != null) { ret.add(roleId); } } return ret; } finally { roleReadUnlock(); } } finally { userReadUnlock(); } }
// Default-role ids for the user, filtered to roles that still exist.
public Set<Long> getDefaultRoleIdsByUser(UserIdentity user) throws PrivilegeException { userReadLock(); try { Set<Long> ret = new HashSet<>(); roleReadLock(); try { for (long roleId : getUserPrivilegeCollectionUnlocked(user).getDefaultRoleIds()) { if (getRolePrivilegeCollectionUnlocked(roleId, false) != null) { ret.add(roleId); } } return ret; } finally { roleReadUnlock(); } } finally { userReadUnlock(); } }
// Persists a new default-role set for the user via the edit log.
// NOTE(review): the log message says "grant role" but this sets default roles —
// consider rewording for accurate audit trails.
public void setUserDefaultRole(Set<Long> roleName, UserIdentity user) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollection collection = getUserPrivilegeCollectionUnlocked(user); roleReadLock(); try { collection.setDefaultRoleIds(roleName); } finally { roleReadUnlock(); } globalStateMgr.getEditLog().logUpdateUserPrivilege( user, collection, provider.getPluginId(), provider.getPluginVersion()); LOG.info("grant role {} to user {}", roleName, user); } finally { userWriteUnlock(); } } public List<String> getRoleNamesByUser(UserIdentity user) throws PrivilegeException { try { userReadLock(); List<String> roleNameList = Lists.newArrayList(); try { roleReadLock(); for (long roleId : getUserPrivilegeCollectionUnlocked(user).getAllRoles()) { RolePrivilegeCollection rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, false); if (rolePrivilegeCollection != null) { roleNameList.add(rolePrivilegeCollection.getName()); } } return roleNameList; } finally { roleReadUnlock(); } } finally { userReadUnlock(); } } public Long
// Name -> id lookup that returns null instead of throwing when the role is missing.
getRoleIdByNameAllowNull(String name) { roleReadLock(); try { return roleNameToId.get(name); } finally { roleReadUnlock(); } } protected Long getRoleIdByNameNoLock(String name) throws PrivilegeException { Long roleId = roleNameToId.get(name); if (roleId == null) { throw new PrivilegeException(String.format("Role %s doesn't exist!", name)); } return roleId; }
// Thin wrappers that delegate PEntryObject construction to the provider.
public PEntryObject generateObject(ObjectType objectType, List<String> objectTokenList) throws PrivilegeException { if (objectTokenList == null) { return null; } return this.provider.generateObject(objectType, objectTokenList, globalStateMgr); } public PEntryObject generateUserObject(ObjectType objectType, UserIdentity user) throws PrivilegeException { return this.provider.generateUserObject(objectType, user, globalStateMgr); } public PEntryObject generateFunctionObject(ObjectType objectType, Long databaseId, Long functionId) throws PrivilegeException { return this.provider.generateFunctionObject(objectType, databaseId, functionId, globalStateMgr); } /** * remove invalid object periodically * <p> * lock order should always be: * AuthenticationManager.lock -> AuthorizationManager.userLock -> AuthorizationManager.roleLock */ public void removeInvalidObject() { userWriteLock(); try { for (Map.Entry<UserIdentity, UserPrivilegeCollection> userPrivEntry : userToPrivilegeCollection.entrySet()) { userPrivEntry.getValue().removeInvalidObject(globalStateMgr); } roleReadLock(); try { for (Map.Entry<UserIdentity, UserPrivilegeCollection> userPrivEntry : userToPrivilegeCollection.entrySet()) { removeInvalidRolesUnlocked(userPrivEntry.getValue().getAllRoles()); removeInvalidRolesUnlocked(userPrivEntry.getValue().getDefaultRoleIds()); } } finally { roleReadUnlock(); } } finally { userWriteUnlock(); } userReadLock(); try { roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollection> rolePrivEntry : roleIdToPrivilegeCollection.entrySet()) { rolePrivEntry.getValue().removeInvalidObject(globalStateMgr); } } finally {
// release the role write lock taken for the role-object cleanup phase
roleWriteUnlock(); } } finally { userReadUnlock(); } roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollection> rolePrivEntry : roleIdToPrivilegeCollection.entrySet()) { RolePrivilegeCollection collection = rolePrivEntry.getValue(); removeInvalidRolesUnlocked(collection.getParentRoleIds()); removeInvalidRolesUnlocked(collection.getSubRoleIds()); } } finally { roleWriteUnlock(); } }
// Drops ids of roles that no longer exist from the given set (caller holds the role lock).
private void removeInvalidRolesUnlocked(Set<Long> roleIds) { roleIds.removeIf(aLong -> !roleIdToPrivilegeCollection.containsKey(aLong)); } /** * get max role inheritance depth * e.g. grant role_a to role role_b; grant role_b to role role_c; * then the inheritance graph would be role_a -> role_b -> role_c * the role inheritance depth for role_a would be 2, for role_b would be 1, for role_c would be 0 */ protected long getMaxRoleInheritanceDepthInner(long currentDepth, long roleId) throws PrivilegeException { RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, false); if (collection == null) { return currentDepth - 1; } Set<Long> subRoleIds = collection.getSubRoleIds(); if (subRoleIds.isEmpty()) { return currentDepth; } else { long maxDepth = -1; for (long subRoleId : subRoleIds) { maxDepth = Math.max(maxDepth, getMaxRoleInheritanceDepthInner(currentDepth + 1, subRoleId)); } return maxDepth; } } /** * get all descendants roles(sub roles and their subs etc.) * e.g.
grant role_a to role role_b; grant role_b to role role_c; * then the inheritance graph would be role_a -> role_b -> role_c * then all descendants roles of role_a would be [role_b, role_c] */ protected Set<Long> getAllDescendantsUnlocked(long roleId) throws PrivilegeException { Set<Long> set = new HashSet<>(); set.add(roleId); getAllDescendantsUnlockedInner(roleId, set); return set; } protected void getAllDescendantsUnlockedInner(long roleId, Set<Long> resultSet) throws PrivilegeException { RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, false); if (collection == null) { return; } for (Long subId : collection.getSubRoleIds()) { if (!resultSet.contains(subId)) { resultSet.add(subId); getAllDescendantsUnlockedInner(subId, resultSet); } } } /** * get all predecessors roles (parent roles and their parents etc.) * e.g. grant role_a to role role_b; grant role_b to role role_c; * then the inheritance graph would be role_a -> role_b -> role_c * then all parent roles of role_c would be [role_a, role_b] */ protected Set<Long> getAllPredecessorsUnlocked(UserPrivilegeCollection collection) throws PrivilegeException { return getAllPredecessorsUnlocked(collection.getAllRoles()); } protected Set<Long> getAllPredecessorsUnlocked(long roleId) throws PrivilegeException { Set<Long> set = new HashSet<>(); set.add(roleId); return getAllPredecessorsUnlocked(set); } protected Set<Long> getAllPredecessorsUnlocked(Set<Long> initialRoleIds) throws PrivilegeException { Set<Long> result = new HashSet<>(initialRoleIds); for (long roleId : initialRoleIds) { getAllPredecessorsInner(roleId, result); } return result; } protected void getAllPredecessorsInner(long roleId, Set<Long> resultSet) throws PrivilegeException { RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(roleId, false); if (collection == null) { resultSet.remove(roleId); return; } for (Long parentId : collection.getParentRoleIds()) { if (!resultSet.contains(parentId)) {
// visit each parent once; the visited set guards against cycles
resultSet.add(parentId); getAllPredecessorsInner(parentId, resultSet); } } } /** * Use new image format by SRMetaBlockWriter/SRMetaBlockReader * +------------------+ * | header | * +------------------+ * | | * | Authorization- | * | Manager | * | | * +------------------+ * | numUser | * +------------------+ * | User | * | Privilege | * | Collection 1 | * +------------------+ * | User | * | Privilege | * | Collection 2 | * +------------------+ * | ... | * +------------------+ * | numRole | * +------------------+ * | Role | * | Privilege | * | Collection 1 | * +------------------+ * | Role | * | Privilege | * | Collection 1 | * +------------------+ * | ... | * +------------------+ * | footer | * +------------------+ */
// NOTE(review): the exception message below says "AuthenticationManager" but this
// is the authorization manager — likely a copy/paste slip.
public void save(DataOutputStream dos) throws IOException { try { final int cnt = 1 + 1 + userToPrivilegeCollection.size() * 2 + 1 + roleIdToPrivilegeCollection.size() * 2; SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, "com.starrocks.privilege.AuthorizationManager", cnt); writer.writeJson(this); writer.writeJson(userToPrivilegeCollection.size()); Iterator<Map.Entry<UserIdentity, UserPrivilegeCollection>> iterator = userToPrivilegeCollection.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<UserIdentity, UserPrivilegeCollection> entry = iterator.next(); writer.writeJson(entry.getKey()); writer.writeJson(entry.getValue()); } writer.writeJson(roleIdToPrivilegeCollection.size()); Iterator<Map.Entry<Long, RolePrivilegeCollection>> roleIter = roleIdToPrivilegeCollection.entrySet().iterator(); while (roleIter.hasNext()) { Map.Entry<Long, RolePrivilegeCollection> entry = roleIter.next(); writer.writeJson(entry.getKey()); writer.writeJson(entry.getValue()); } writer.close(); } catch (SRMetaBlockException e) { throw new IOException("failed to save AuthenticationManager!", e); } }
// Deserializes the manager from an image written by save().
public static AuthorizationMgr load( DataInputStream dis, GlobalStateMgr globalStateMgr, AuthorizationProvider provider) throws IOException, DdlException { try {
SRMetaBlockReader reader = new SRMetaBlockReader(dis, "com.starrocks.privilege.AuthorizationManager"); AuthorizationMgr ret = null; try { ret = reader.readJson(AuthorizationMgr.class); ret.globalStateMgr = globalStateMgr; if (provider == null) { ret.provider = new DefaultAuthorizationProvider(); } else { ret.provider = provider; } ret.initBuiltinRolesAndUsers(); int numUser = reader.readJson(int.class); LOG.info("loading {} users", numUser); for (int i = 0; i != numUser; ++i) { UserIdentity userIdentity = reader.readJson(UserIdentity.class); UserPrivilegeCollection collection = reader.readJson(UserPrivilegeCollection.class); if (userIdentity.equals(UserIdentity.ROOT)) { UserPrivilegeCollection rootUserPrivCollection = ret.getUserPrivilegeCollectionUnlocked(UserIdentity.ROOT); collection.grantRoles(rootUserPrivCollection.getAllRoles()); collection.setDefaultRoleIds(rootUserPrivCollection.getDefaultRoleIds()); collection.typeToPrivilegeEntryList = rootUserPrivCollection.typeToPrivilegeEntryList; } ret.provider.upgradePrivilegeCollection(collection, ret.pluginId, ret.pluginVersion); ret.userToPrivilegeCollection.put(userIdentity, collection); } int numRole = reader.readJson(int.class); LOG.info("loading {} roles", numRole); for (int i = 0; i != numRole; ++i) { Long roleId = reader.readJson(Long.class); RolePrivilegeCollection collection = reader.readJson(RolePrivilegeCollection.class); if (PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_IDS.contains(roleId)) { RolePrivilegeCollection builtInRolePrivilegeCollection = ret.roleIdToPrivilegeCollection.get(roleId); collection.typeToPrivilegeEntryList = builtInRolePrivilegeCollection.typeToPrivilegeEntryList; } ret.provider.upgradePrivilegeCollection(collection, ret.pluginId, ret.pluginVersion); ret.roleIdToPrivilegeCollection.put(roleId, collection); } } catch (SRMetaBlockEOFException eofException) { LOG.warn("got EOF exception, ignore, ", eofException); } finally { reader.close(); } assert ret != null; LOG.info("loaded 
{} users, {} roles", ret.userToPrivilegeCollection.size(), ret.roleIdToPrivilegeCollection.size()); ret.isLoaded = true; return ret; } catch (SRMetaBlockException | PrivilegeException e) { throw new DdlException("failed to load AuthorizationManager!", e); } } public boolean isLoaded() { return isLoaded; } public void setLoaded(boolean loaded) { isLoaded = loaded; } /** * these public interfaces are for AuthUpgrader to upgrade from 2.x */ public void upgradeUserInitPrivilegeUnlock(UserIdentity userIdentity, UserPrivilegeCollection collection) { userToPrivilegeCollection.put(userIdentity, collection); LOG.info("upgrade user {}", userIdentity); } public void upgradeRoleInitPrivilegeUnlock(long roleId, RolePrivilegeCollection collection) { roleIdToPrivilegeCollection.put(roleId, collection); roleNameToId.put(collection.getName(), roleId); LOG.info("upgrade role {}[{}]", collection.getName(), roleId); } public void grantStorageVolumeUsageToPublicRole(String storageVolumeId) throws PrivilegeException { roleWriteLock(); try { RolePrivilegeCollection collection = getRolePrivilegeCollectionUnlocked(PrivilegeBuiltinConstants.PUBLIC_ROLE_ID, true); List<PEntryObject> object = Collections.singletonList(new StorageVolumePEntryObject(storageVolumeId)); collection.grant(ObjectType.STORAGE_VOLUME, Collections.singletonList(PrivilegeType.USAGE), object, false); } finally { roleWriteUnlock(); } } public void loadV2(SRMetaBlockReader reader) throws IOException, SRMetaBlockException, SRMetaBlockEOFException { AuthorizationMgr ret = null; try { ret = reader.readJson(AuthorizationMgr.class); ret.globalStateMgr = globalStateMgr; if (provider == null) { ret.provider = new DefaultAuthorizationProvider(); } else { ret.provider = provider; } ret.initBuiltinRolesAndUsers(); int numUser = reader.readJson(int.class); LOG.info("loading {} users", numUser); for (int i = 0; i != numUser; ++i) { UserIdentity userIdentity = reader.readJson(UserIdentity.class); UserPrivilegeCollection collection = 
reader.readJson(UserPrivilegeCollection.class); if (userIdentity.equals(UserIdentity.ROOT)) { UserPrivilegeCollection rootUserPrivCollection = ret.getUserPrivilegeCollectionUnlocked(UserIdentity.ROOT); collection.grantRoles(rootUserPrivCollection.getAllRoles()); collection.setDefaultRoleIds(rootUserPrivCollection.getDefaultRoleIds()); collection.typeToPrivilegeEntryList = rootUserPrivCollection.typeToPrivilegeEntryList; } ret.userToPrivilegeCollection.put(userIdentity, collection); } int numRole = reader.readJson(int.class); LOG.info("loading {} roles", numRole); for (int i = 0; i != numRole; ++i) { Long roleId = reader.readJson(Long.class); RolePrivilegeCollection collection = reader.readJson(RolePrivilegeCollection.class); if (PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_IDS.contains(roleId)) { RolePrivilegeCollection builtInRolePrivilegeCollection = ret.roleIdToPrivilegeCollection.get(roleId); collection.typeToPrivilegeEntryList = builtInRolePrivilegeCollection.typeToPrivilegeEntryList; } ret.roleIdToPrivilegeCollection.put(roleId, collection); } assert ret != null; LOG.info("loaded {} users, {} roles", ret.userToPrivilegeCollection.size(), ret.roleIdToPrivilegeCollection.size()); isLoaded = true; userToPrivilegeCollection = ret.userToPrivilegeCollection; roleIdToPrivilegeCollection = ret.roleIdToPrivilegeCollection; } catch (PrivilegeException e) { throw new IOException("failed to load AuthorizationManager!", e); } } }
/**
 * V2 authorization manager: owns user/role privilege collections, the role
 * inheritance graph, and a cache of merged (user + active roles) privileges.
 * Thread-safety: guarded by two separate read-write locks, one for the user
 * map and one for the role map (see the *Lock helper methods further down).
 */
class AuthorizationMgr {
    private static final Logger LOG = LogManager.getLogger(AuthorizationMgr.class);

    // Persisted (Gson) state: role name -> id mapping plus provider plugin id/version.
    @SerializedName(value = "r")
    private final Map<String, Long> roleNameToId;
    @SerializedName(value = "i")
    private short pluginId;
    @SerializedName(value = "v")
    private short pluginVersion;

    protected AuthorizationProvider provider;
    private GlobalStateMgr globalStateMgr;

    // Guarded by userLock / roleLock respectively.
    protected Map<UserIdentity, UserPrivilegeCollectionV2> userToPrivilegeCollection;
    protected Map<Long, RolePrivilegeCollectionV2> roleIdToPrivilegeCollection;

    private static final int MAX_NUM_CACHED_MERGED_PRIVILEGE_COLLECTION = 1000;
    private static final int CACHED_MERGED_PRIVILEGE_COLLECTION_EXPIRE_MIN = 60;
    // Cache of merged privilege collections keyed by (user identity, active role-id set);
    // entries are loaded lazily via loadPrivilegeCollection and invalidated explicitly
    // whenever a user's or role's privileges change.
    protected LoadingCache<Pair<UserIdentity, Set<Long>>, PrivilegeCollectionV2> ctxToMergedPrivilegeCollections =
            CacheBuilder.newBuilder()
                    .maximumSize(MAX_NUM_CACHED_MERGED_PRIVILEGE_COLLECTION)
                    .expireAfterAccess(CACHED_MERGED_PRIVILEGE_COLLECTION_EXPIRE_MIN, TimeUnit.MINUTES)
                    .build(new CacheLoader<Pair<UserIdentity, Set<Long>>, PrivilegeCollectionV2>() {
                        @Override
                        public PrivilegeCollectionV2 load(@NotNull Pair<UserIdentity, Set<Long>> userIdentitySetPair)
                                throws Exception {
                            return loadPrivilegeCollection(userIdentitySetPair.first, userIdentitySetPair.second);
                        }
                    });

    private final ReentrantReadWriteLock userLock;
    private final ReentrantReadWriteLock roleLock;

    // Set to true once an image has been loaded.
    private boolean isLoaded = false;

    // For deserialization only: does NOT create built-in roles/users.
    protected AuthorizationMgr() {
        roleNameToId = new HashMap<>();
        userToPrivilegeCollection = new HashMap<>();
        roleIdToPrivilegeCollection = new HashMap<>();
        userLock = new ReentrantReadWriteLock();
        roleLock = new ReentrantReadWriteLock();
    }

    /**
     * Full constructor: wires the provider (defaulting to DefaultAuthorizationProvider)
     * and immediately initializes the built-in roles and the root user.
     */
    public AuthorizationMgr(GlobalStateMgr globalStateMgr, AuthorizationProvider provider) {
        this.globalStateMgr = globalStateMgr;
        if (provider == null) {
            this.provider = new DefaultAuthorizationProvider();
        } else {
            this.provider = provider;
        }
        pluginId = this.provider.getPluginId();
        pluginVersion = this.provider.getPluginVersion();
        roleNameToId = new HashMap<>();
        userLock = new ReentrantReadWriteLock();
        roleLock = new ReentrantReadWriteLock();
        userToPrivilegeCollection = new HashMap<>();
        roleIdToPrivilegeCollection = new HashMap<>();
        initBuiltinRolesAndUsers();
    }

    /**
     * Creates the built-in roles (root, db_admin, cluster_admin, user_admin, public)
     * and the root user. All built-in roles except public are made immutable.
     * Wraps PrivilegeException in RuntimeException because failure here is fatal.
     */
    public void initBuiltinRolesAndUsers() {
        try {
            // root: every privilege on every object type
            RolePrivilegeCollectionV2 rolePrivilegeCollection =
                    initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.ROOT_ROLE_ID,
                            PrivilegeBuiltinConstants.ROOT_ROLE_NAME,
                            "built-in root role which has all privileges on all objects");
            for (ObjectType objectType : provider.getAllPrivObjectTypes()) {
                initPrivilegeCollectionAllObjects(rolePrivilegeCollection, objectType,
                        provider.getAvailablePrivType(objectType));
            }
            rolePrivilegeCollection.disableMutable();

            // db_admin: all SYSTEM privileges except GRANT and NODE, plus all object privileges
            rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.DB_ADMIN_ROLE_ID,
                    PrivilegeBuiltinConstants.DB_ADMIN_ROLE_NAME, "built-in database administration role");
            List<PrivilegeType> actionWithoutNodeGrant = provider.getAvailablePrivType(ObjectType.SYSTEM).stream().filter(
                    x -> !x.equals(PrivilegeType.GRANT) && !x.equals(PrivilegeType.NODE)).collect(Collectors.toList());
            initPrivilegeCollections(rolePrivilegeCollection, ObjectType.SYSTEM, actionWithoutNodeGrant, null, false);
            for (ObjectType t : Arrays.asList(
                    ObjectType.CATALOG,
                    ObjectType.DATABASE,
                    ObjectType.TABLE,
                    ObjectType.VIEW,
                    ObjectType.MATERIALIZED_VIEW,
                    ObjectType.RESOURCE,
                    ObjectType.RESOURCE_GROUP,
                    ObjectType.FUNCTION,
                    ObjectType.GLOBAL_FUNCTION,
                    ObjectType.STORAGE_VOLUME)) {
                initPrivilegeCollectionAllObjects(rolePrivilegeCollection, t, provider.getAvailablePrivType(t));
            }
            rolePrivilegeCollection.disableMutable();

            // cluster_admin: only the NODE privilege
            rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.CLUSTER_ADMIN_ROLE_ID,
                    PrivilegeBuiltinConstants.CLUSTER_ADMIN_ROLE_NAME, "built-in cluster administration role");
            initPrivilegeCollections(
                    rolePrivilegeCollection,
                    ObjectType.SYSTEM,
                    Collections.singletonList(PrivilegeType.NODE),
                    null,
                    false);
            rolePrivilegeCollection.disableMutable();

            // user_admin: GRANT plus every USER-object privilege
            rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.USER_ADMIN_ROLE_ID,
                    PrivilegeBuiltinConstants.USER_ADMIN_ROLE_NAME, "built-in user administration role");
            initPrivilegeCollections(
                    rolePrivilegeCollection,
                    ObjectType.SYSTEM,
                    Collections.singletonList(PrivilegeType.GRANT),
                    null,
                    false);
            ObjectType t = ObjectType.USER;
            initPrivilegeCollectionAllObjects(rolePrivilegeCollection, t, provider.getAvailablePrivType(t));
            rolePrivilegeCollection.disableMutable();

            // public: SELECT on information_schema tables; stays mutable
            rolePrivilegeCollection = initBuiltinRoleUnlocked(PrivilegeBuiltinConstants.PUBLIC_ROLE_ID,
                    PrivilegeBuiltinConstants.PUBLIC_ROLE_NAME, "built-in public role which is owned by any user");
            List<PEntryObject> object = Collections.singletonList(new TablePEntryObject(
                    Long.toString(SystemId.INFORMATION_SCHEMA_DB_ID), PrivilegeBuiltinConstants.ALL_TABLES_UUID));
            rolePrivilegeCollection.grant(ObjectType.TABLE, Collections.singletonList(PrivilegeType.SELECT), object, false);

            // root user gets the root role as its default role
            UserPrivilegeCollectionV2 rootCollection = new UserPrivilegeCollectionV2();
            rootCollection.grantRole(PrivilegeBuiltinConstants.ROOT_ROLE_ID);
            rootCollection.setDefaultRoleIds(Sets.newHashSet(PrivilegeBuiltinConstants.ROOT_ROLE_ID));
            userToPrivilegeCollection.put(UserIdentity.ROOT, rootCollection);
        } catch (PrivilegeException e) {
            throw new RuntimeException("Fatal error when initializing built-in role and user", e);
        }
    }

    // Grants the given actions on a single object (built from tokens, or the
    // "null object" placeholder when tokens is null) to the collection.
    private void initPrivilegeCollections(PrivilegeCollectionV2 collection, ObjectType objectType,
                                          List<PrivilegeType> actionList, List<String> tokens, boolean isGrant)
            throws PrivilegeException {
        List<PEntryObject> object;
        if (tokens != null) {
            object = Collections.singletonList(provider.generateObject(objectType, tokens, globalStateMgr));
        } else {
            object = Arrays.asList(new PEntryObject[] {null});
        }
        collection.grant(objectType, actionList, object, isGrant);
    }

    // Grants the given actions on the "all objects" wildcard of the given type;
    // the wildcard shape (number of "*" tokens) depends on the object type.
    private void initPrivilegeCollectionAllObjects(
            PrivilegeCollectionV2 collection, ObjectType objectType, List<PrivilegeType> actionList)
            throws PrivilegeException {
        List<PEntryObject> objects = new ArrayList<>();
        if (ObjectType.TABLE.equals(objectType)) {
            // catalog.*, db.*, table.*
            objects.add(provider.generateObject(objectType, Lists.newArrayList("*", "*", "*"), globalStateMgr));
            collection.grant(objectType, actionList, objects, false);
        } else if (ObjectType.VIEW.equals(objectType)
                || ObjectType.MATERIALIZED_VIEW.equals(objectType)
                || ObjectType.DATABASE.equals(objectType)) {
            // db.*, object.*
            objects.add(provider.generateObject(objectType, Lists.newArrayList("*", "*"), globalStateMgr));
            collection.grant(objectType, actionList, objects, false);
        } else if (ObjectType.USER.equals(objectType)) {
            objects.add(provider.generateUserObject(objectType, null, globalStateMgr));
            collection.grant(objectType, actionList, objects, false);
        } else if (ObjectType.RESOURCE.equals(objectType)
                || ObjectType.CATALOG.equals(objectType)
                || ObjectType.RESOURCE_GROUP.equals(objectType)
                || ObjectType.STORAGE_VOLUME.equals(objectType)) {
            objects.add(provider.generateObject(objectType, Lists.newArrayList("*"), globalStateMgr));
            collection.grant(objectType, actionList, objects, false);
        } else if (ObjectType.FUNCTION.equals(objectType)) {
            objects.add(provider.generateFunctionObject(objectType,
                    PrivilegeBuiltinConstants.ALL_DATABASE_ID,
                    PrivilegeBuiltinConstants.ALL_FUNCTIONS_ID, globalStateMgr));
            collection.grant(objectType, actionList, objects, false);
        } else if (ObjectType.GLOBAL_FUNCTION.equals(objectType)) {
            objects.add(provider.generateFunctionObject(objectType,
                    PrivilegeBuiltinConstants.GLOBAL_FUNCTION_DEFAULT_DATABASE_ID,
                    PrivilegeBuiltinConstants.ALL_FUNCTIONS_ID, globalStateMgr));
            collection.grant(objectType, actionList, objects, false);
        } else if (ObjectType.SYSTEM.equals(objectType)) {
            // SYSTEM privileges have no target object
            collection.grant(objectType, actionList, Arrays.asList(new PEntryObject[] {null}), false);
        } else {
            throw new PrivilegeException("unsupported type " + objectType);
        }
    }

    // Convenience overload: built-in role without a comment.
    private RolePrivilegeCollectionV2 initBuiltinRoleUnlocked(long roleId, String name) {
        return initBuiltinRoleUnlocked(roleId, name, null);
    }
    // Registers a built-in role under the given fixed id and name; created MUTABLE so
    // callers can grant into it, then typically frozen via disableMutable().
    private RolePrivilegeCollectionV2 initBuiltinRoleUnlocked(long roleId, String name, String comment) {
        RolePrivilegeCollectionV2 collection = new RolePrivilegeCollectionV2(
                name, comment, RolePrivilegeCollectionV2.RoleFlags.MUTABLE);
        roleIdToPrivilegeCollection.put(roleId, collection);
        roleNameToId.put(name, roleId);
        LOG.info("create built-in role {}[{}]", name, roleId);
        return collection;
    }

    // Lock helpers: userLock guards userToPrivilegeCollection, roleLock guards
    // roleIdToPrivilegeCollection/roleNameToId.
    private void userReadLock() {
        userLock.readLock().lock();
    }

    private void userReadUnlock() {
        userLock.readLock().unlock();
    }

    private void userWriteLock() {
        userLock.writeLock().lock();
    }

    private void userWriteUnlock() {
        userLock.writeLock().unlock();
    }

    private void roleReadLock() {
        roleLock.readLock().lock();
    }

    private void roleReadUnlock() {
        roleLock.readLock().unlock();
    }

    private void roleWriteLock() {
        roleLock.writeLock().lock();
    }

    private void roleWriteUnlock() {
        roleLock.writeLock().unlock();
    }

    // Entry point for GRANT <privs> ON <objects> TO {USER|ROLE}; dispatches on target kind.
    public void grant(GrantPrivilegeStmt stmt) throws DdlException {
        try {
            if (stmt.getRole() != null) {
                grantToRole(
                        stmt.getObjectType(),
                        stmt.getPrivilegeTypes(),
                        stmt.getObjectList(),
                        stmt.isWithGrantOption(),
                        stmt.getRole());
            } else {
                grantToUser(
                        stmt.getObjectType(),
                        stmt.getPrivilegeTypes(),
                        stmt.getObjectList(),
                        stmt.isWithGrantOption(),
                        stmt.getUserIdentity());
            }
        } catch (PrivilegeException e) {
            throw new DdlException("failed to grant: " + e.getMessage(), e);
        }
    }

    // Grants privileges directly to a user, persists via edit log, and invalidates
    // that user's cached merged collections.
    protected void grantToUser(
            ObjectType type,
            List<PrivilegeType> privilegeTypes,
            List<PEntryObject> objects,
            boolean isGrant,
            UserIdentity userIdentity) throws PrivilegeException {
        userWriteLock();
        try {
            UserPrivilegeCollectionV2 collection = getUserPrivilegeCollectionUnlocked(userIdentity);
            collection.grant(type, privilegeTypes, objects, isGrant);
            globalStateMgr.getEditLog().logUpdateUserPrivilege(
                    userIdentity, collection, provider.getPluginId(), provider.getPluginVersion());
            invalidateUserInCache(userIdentity);
        } finally {
            userWriteUnlock();
        }
    }

    // Grants privileges to a role, invalidates cache entries for the role and all
    // its descendants, and persists via edit log.
    protected void grantToRole(
            ObjectType objectType,
            List<PrivilegeType> privilegeTypes,
            List<PEntryObject> objects,
            boolean isGrant,
            String roleName) throws PrivilegeException {
        roleWriteLock();
        try {
            long roleId = getRoleIdByNameNoLock(roleName);
            invalidateRolesInCacheRoleUnlocked(roleId);
            RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, true);
            collection.grant(objectType, privilegeTypes, objects, isGrant);

            Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new HashMap<>();
            rolePrivCollectionModified.put(roleId, collection);
            globalStateMgr.getEditLog().logUpdateRolePrivilege(
                    rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion());
        } finally {
            roleWriteUnlock();
        }
    }

    // Entry point for REVOKE <privs> ON <objects> FROM {USER|ROLE}.
    // NOTE(review): unlike grant(), the DdlException here drops the cause and prefix.
    public void revoke(RevokePrivilegeStmt stmt) throws DdlException {
        try {
            if (stmt.getRole() != null) {
                revokeFromRole(
                        stmt.getObjectType(),
                        stmt.getPrivilegeTypes(),
                        stmt.getObjectList(),
                        stmt.getRole());
            } else {
                revokeFromUser(
                        stmt.getObjectType(),
                        stmt.getPrivilegeTypes(),
                        stmt.getObjectList(),
                        stmt.getUserIdentity());
            }
        } catch (PrivilegeException e) {
            throw new DdlException(e.getMessage());
        }
    }

    // Revokes privileges from a user, persists via edit log, invalidates cache.
    protected void revokeFromUser(
            ObjectType objectType,
            List<PrivilegeType> privilegeTypes,
            List<PEntryObject> objects,
            UserIdentity userIdentity) throws PrivilegeException {
        userWriteLock();
        try {
            UserPrivilegeCollectionV2 collection = getUserPrivilegeCollectionUnlocked(userIdentity);
            collection.revoke(objectType, privilegeTypes, objects);
            globalStateMgr.getEditLog().logUpdateUserPrivilege(
                    userIdentity, collection, provider.getPluginId(), provider.getPluginVersion());
            invalidateUserInCache(userIdentity);
        } finally {
            userWriteUnlock();
        }
    }

    // Revokes privileges from a role, invalidates cache entries for it and its
    // descendants, and persists via edit log.
    protected void revokeFromRole(
            ObjectType objectType,
            List<PrivilegeType> privilegeTypes,
            List<PEntryObject> objects,
            String roleName) throws PrivilegeException {
        roleWriteLock();
        try {
            long roleId = getRoleIdByNameNoLock(roleName);
            RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, true);
            collection.revoke(objectType, privilegeTypes, objects);
            invalidateRolesInCacheRoleUnlocked(roleId);
            Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new HashMap<>();
            rolePrivCollectionModified.put(roleId, collection);
            globalStateMgr.getEditLog().logUpdateRolePrivilege(
                    rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion());
        } finally {
            roleWriteUnlock();
        }
    }

    // Entry point for GRANT <roles> TO {USER|ROLE}.
    public void grantRole(GrantRoleStmt stmt) throws DdlException {
        try {
            if (stmt.getUserIdentity() != null) {
                grantRoleToUser(stmt.getGranteeRole(), stmt.getUserIdentity());
            } else {
                grantRoleToRole(stmt.getGranteeRole(), stmt.getRole());
            }
        } catch (PrivilegeException e) {
            throw new DdlException("failed to grant role: " + e.getMessage(), e);
        }
    }

    /**
     * Grants the named roles to a user. Each grant is validated against
     * Config.privilege_max_total_roles_per_user and rolled back (revokeRole) if the
     * limit check throws. Granting PUBLIC is rejected since everyone has it implicitly.
     */
    protected void grantRoleToUser(List<String> parentRoleName, UserIdentity user) throws PrivilegeException {
        userWriteLock();
        try {
            UserPrivilegeCollectionV2 userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(user);

            roleReadLock();
            try {
                for (String parentRole : parentRoleName) {
                    long roleId = getRoleIdByNameNoLock(parentRole);
                    if (roleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) {
                        throw new PrivilegeException("Granting role PUBLIC has no effect. " +
                                "Every user and role has role PUBLIC implicitly granted.");
                    }
                    userPrivilegeCollection.grantRole(roleId);
                    boolean verifyDone = false;
                    try {
                        Set<Long> result = getAllPredecessorsUnlocked(userPrivilegeCollection);
                        if (result.size() > Config.privilege_max_total_roles_per_user) {
                            LOG.warn("too many predecessor roles {} for user {}", result, user);
                            throw new PrivilegeException(String.format(
                                    "%s has total %d predecessor roles > %d!",
                                    user, result.size(), Config.privilege_max_total_roles_per_user));
                        }
                        verifyDone = true;
                    } finally {
                        if (!verifyDone) {
                            // undo the optimistic grant when validation failed
                            userPrivilegeCollection.revokeRole(roleId);
                        }
                    }
                }
            } finally {
                roleReadUnlock();
            }

            globalStateMgr.getEditLog().logUpdateUserPrivilege(
                    user, userPrivilegeCollection, provider.getPluginId(), provider.getPluginVersion());
            invalidateUserInCache(user);
            LOG.info("grant role {} to user {}", Joiner.on(", ").join(parentRoleName), user);
        } finally {
            userWriteUnlock();
        }
    }

    /**
     * Grants the named parent roles to a role, guarding against cycles (a parent that
     * already has this role as predecessor) and against exceeding
     * Config.privilege_max_role_depth; failed depth checks roll back the sub-role link.
     */
    protected void grantRoleToRole(List<String> parentRoleName, String roleName) throws PrivilegeException {
        roleWriteLock();
        try {
            long roleId = getRoleIdByNameNoLock(roleName);
            RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, true);
            Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new HashMap<>();
            rolePrivCollectionModified.put(roleId, collection);
            for (String parentRole : parentRoleName) {
                long parentRoleId = getRoleIdByNameNoLock(parentRole);

                if (parentRoleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) {
                    throw new PrivilegeException("Granting role PUBLIC has no effect. " +
                            "Every user and role has role PUBLIC implicitly granted.");
                }

                RolePrivilegeCollectionV2 parentCollection = getRolePrivilegeCollectionUnlocked(parentRoleId, true);

                // cycle check: the grant would create roleId -> ... -> roleId
                Set<Long> parentRolePredecessors = getAllPredecessorsUnlocked(parentRoleId);
                if (parentRolePredecessors.contains(roleId)) {
                    throw new PrivilegeException(String.format("role %s[%d] is already a predecessor role of %s[%d]",
                            roleName, roleId, parentRole, parentRoleId));
                }

                boolean verifyDone = false;
                parentCollection.addSubRole(roleId);
                try {
                    // temporarily add sub-role, then verify depth from every affected ancestor
                    parentRolePredecessors = getAllPredecessorsUnlocked(parentRoleId);
                    parentRolePredecessors.add(parentRoleId);
                    for (long i : parentRolePredecessors) {
                        long cnt = getMaxRoleInheritanceDepthInner(0, i);
                        if (cnt > Config.privilege_max_role_depth) {
                            String name = getRolePrivilegeCollectionUnlocked(i, true).getName();
                            throw new PrivilegeException(String.format(
                                    "role inheritance depth for %s[%d] is %d > %d",
                                    name, i, cnt, Config.privilege_max_role_depth));
                        }
                    }
                    verifyDone = true;
                } finally {
                    if (!verifyDone) {
                        // undo the optimistic link when validation failed
                        parentCollection.removeSubRole(roleId);
                    }
                }
                collection.addParentRole(parentRoleId);
                rolePrivCollectionModified.put(parentRoleId, parentCollection);
            }
            invalidateRolesInCacheRoleUnlocked(roleId);
            RolePrivilegeCollectionInfo info = new RolePrivilegeCollectionInfo(
                    rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion());
            globalStateMgr.getEditLog().logUpdateRolePrivilege(info);
            // NOTE(review): first two log args both derive from parentRoleName (list then joined
            // list) — the first placeholder was probably meant to be the parent role ids; confirm.
            LOG.info("grant role {}[{}] to role {}[{}]", parentRoleName,
                    Joiner.on(", ").join(parentRoleName), roleName, roleId);
        } finally {
            roleWriteUnlock();
        }
    }

    // Entry point for REVOKE <roles> FROM {USER|ROLE}.
    public void revokeRole(RevokeRoleStmt stmt) throws DdlException {
        try {
            if (stmt.getUserIdentity() != null) {
                revokeRoleFromUser(stmt.getGranteeRole(), stmt.getUserIdentity());
            } else {
                revokeRoleFromRole(stmt.getGranteeRole(), stmt.getRole());
            }
        } catch (PrivilegeException e) {
            throw new DdlException("failed to revoke role: " + e.getMessage(), e);
        }
    }

    // Revokes the named roles from a user; revoking PUBLIC is rejected.
    protected void revokeRoleFromUser(List<String> roleNameList, UserIdentity user) throws PrivilegeException {
        userWriteLock();
        try {
            UserPrivilegeCollectionV2 collection = getUserPrivilegeCollectionUnlocked(user);
            roleReadLock();
            try {
                for (String roleName : roleNameList) {
                    long roleId = getRoleIdByNameNoLock(roleName);
                    if (roleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) {
                        throw new PrivilegeException("Revoking role PUBLIC has no effect. " +
                                "Every user and role has role PUBLIC implicitly granted.");
                    }
                    collection.revokeRole(roleId);
                }
            } finally {
                roleReadUnlock();
            }
            globalStateMgr.getEditLog().logUpdateUserPrivilege(
                    user, collection, provider.getPluginId(), provider.getPluginVersion());
            invalidateUserInCache(user);
            LOG.info("revoke role {} from user {}", roleNameList.toString(), user);
        } finally {
            userWriteUnlock();
        }
    }

    // Revokes the named parent roles from a role: unlinks both directions of the
    // inheritance edge, then logs all touched collections and invalidates caches.
    protected void revokeRoleFromRole(List<String> parentRoleNameList, String roleName) throws PrivilegeException {
        roleWriteLock();
        try {
            long roleId = getRoleIdByNameNoLock(roleName);
            RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, true);

            for (String parentRoleName : parentRoleNameList) {
                long parentRoleId = getRoleIdByNameNoLock(parentRoleName);

                if (parentRoleId == PrivilegeBuiltinConstants.PUBLIC_ROLE_ID) {
                    throw new PrivilegeException("Revoking role PUBLIC has no effect. " +
                            "Every user and role has role PUBLIC implicitly granted.");
                }

                RolePrivilegeCollectionV2 parentCollection = getRolePrivilegeCollectionUnlocked(parentRoleId, true);
                parentCollection.removeSubRole(roleId);
                collection.removeParentRole(parentRoleId);
            }

            // collect every modified collection for a single edit-log entry
            Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new HashMap<>();
            rolePrivCollectionModified.put(roleId, collection);
            List<Long> parentRoleIdList = new ArrayList<>();
            for (String parentRoleName : parentRoleNameList) {
                long parentRoleId = getRoleIdByNameNoLock(parentRoleName);
                RolePrivilegeCollectionV2 parentCollection = getRolePrivilegeCollectionUnlocked(parentRoleId, true);
                parentRoleIdList.add(parentRoleId);
                rolePrivCollectionModified.put(parentRoleId, parentCollection);
            }
            RolePrivilegeCollectionInfo info = new RolePrivilegeCollectionInfo(
                    rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion());
            globalStateMgr.getEditLog().logUpdateRolePrivilege(info);
            invalidateRolesInCacheRoleUnlocked(roleId);
            LOG.info("revoke role {}[{}] from role {}[{}]",
                    parentRoleNameList.toString(), parentRoleIdList.toString(), roleName, roleId);
        } finally {
            roleWriteUnlock();
        }
    }

    // Delegates grant validation to the provider (e.g. checks the object/privilege combo).
    public void validateGrant(ObjectType objectType, List<PrivilegeType> privilegeTypes, List<PEntryObject> objects)
            throws PrivilegeException {
        provider.validateGrant(objectType, privilegeTypes, objects);
    }

    // Returns all role ids directly granted to the user (not expanded to predecessors).
    public static Set<Long> getOwnedRolesByUser(UserIdentity userIdentity) throws PrivilegeException {
        AuthorizationMgr manager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
        try {
            // NOTE(review): the lock is acquired inside try — if userReadLock() ever threw,
            // the finally would unlock a lock this thread doesn't hold; confirm intent.
            manager.userReadLock();
            UserPrivilegeCollectionV2 userCollection = manager.getUserPrivilegeCollectionUnlocked(userIdentity);
            return userCollection.getAllRoles();
        } finally {
            manager.userReadUnlock();
        }
    }

    // Checks one privilege on either the type-level wildcard (objectNames == null)
    // or the concrete object built from objectNames.
    protected boolean checkAction(
            PrivilegeCollectionV2 collection, ObjectType objectType, PrivilegeType privilegeType,
            List<String> objectNames) throws PrivilegeException {
        if (objectNames == null) {
            return provider.check(objectType, privilegeType, null, collection);
        } else {
            PEntryObject object = provider.generateObject(
                    objectType, objectNames, globalStateMgr);
            return provider.check(objectType, privilegeType, object, collection);
        }
    }

    // True iff the current user (with current roles) holds IMPERSONATE on impersonateUser.
    // Privilege errors are logged and treated as "no".
    public boolean canExecuteAs(ConnectContext context, UserIdentity impersonateUser) {
        try {
            PrivilegeCollectionV2 collection = mergePrivilegeCollection(
                    context.getCurrentUserIdentity(), context.getCurrentRoleIds());
            PEntryObject object = provider.generateUserObject(ObjectType.USER, impersonateUser, globalStateMgr);
            return provider.check(ObjectType.USER, PrivilegeType.IMPERSONATE, object, collection);
        } catch (PrivilegeException e) {
            LOG.warn("caught exception in canExecuteAs() user[{}]", impersonateUser, e);
            return false;
        }
    }

    // True iff the current user may grant the given privileges: either holds the
    // system-level GRANT privilege, or the provider's with-grant-option check passes.
    public boolean allowGrant(ConnectContext context, ObjectType type, List<PrivilegeType> wants,
                              List<PEntryObject> objects) {
        try {
            PrivilegeCollectionV2 collection = mergePrivilegeCollection(
                    context.getCurrentUserIdentity(), context.getCurrentRoleIds());
            return checkAction(collection, ObjectType.SYSTEM, PrivilegeType.GRANT, null)
                    || provider.allowGrant(type, wants, objects, collection);
        } catch (PrivilegeException e) {
            LOG.warn("caught exception when allowGrant", e);
            return false;
        }
    }

    // Edit-log replay path: installs the persisted user collection (after a provider
    // upgrade pass) and invalidates that user's cache entries.
    public void replayUpdateUserPrivilegeCollection(
            UserIdentity user, UserPrivilegeCollectionV2 privilegeCollection, short pluginId, short pluginVersion)
            throws PrivilegeException {
        userWriteLock();
        try {
            provider.upgradePrivilegeCollection(privilegeCollection, pluginId, pluginVersion);
            userToPrivilegeCollection.put(user, privilegeCollection);
            invalidateUserInCache(user);
            LOG.info("replayed update user {}", user);
        } finally {
            userWriteUnlock();
        }
    }

    /**
     * init all builtin privilege when a user is created, called by AuthenticationManager
     */
    public UserPrivilegeCollectionV2 onCreateUser(UserIdentity user, List<String> defaultRoleName)
            throws PrivilegeException {
        userWriteLock();
        try {
            UserPrivilegeCollectionV2 privilegeCollection = new UserPrivilegeCollectionV2();

            if (!defaultRoleName.isEmpty()) {
                Set<Long> roleIds = new HashSet<>();
                for (String role : defaultRoleName) {
                    Long roleId = getRoleIdByNameNoLock(role);
                    privilegeCollection.grantRole(roleId);
                    roleIds.add(roleId);
                }
                privilegeCollection.setDefaultRoleIds(roleIds);
            }

            userToPrivilegeCollection.put(user, privilegeCollection);
            LOG.info("user privilege for {} is created, role {} is granted",
                    user, PrivilegeBuiltinConstants.PUBLIC_ROLE_NAME);
            return privilegeCollection;
        } finally {
            userWriteUnlock();
        }
    }

    /**
     * drop user privilege collection when a user is dropped, called by AuthenticationManager
     */
    public void onDropUser(UserIdentity user) {
        userWriteLock();
        try {
            userToPrivilegeCollection.remove(user);
            invalidateUserInCache(user);
        } finally {
            userWriteUnlock();
        }
    }

    public short getProviderPluginId() {
        return provider.getPluginId();
    }

    public short getProviderPluginVersion() {
        return provider.getPluginVersion();
    }

    /**
     * read from cache
     */
    protected PrivilegeCollectionV2 mergePrivilegeCollection(UserIdentity userIdentity, Set<Long> roleIds)
            throws PrivilegeException {
        try {
            return ctxToMergedPrivilegeCollections.get(new Pair<>(userIdentity, roleIds));
        } catch (ExecutionException e) {
            String errMsg = String.format("failed merge privilege collection on %s with roles %s",
                    userIdentity, roleIds);
            PrivilegeException exception = new PrivilegeException(errMsg);
            exception.initCause(e);
            throw exception;
        }
    }

    /**
     * used for cache to do the actual merge job
     */
    protected PrivilegeCollectionV2 loadPrivilegeCollection(UserIdentity userIdentity, Set<Long> roleIdsSpecified)
            throws PrivilegeException {
        PrivilegeCollectionV2 collection = new PrivilegeCollectionV2();
        try {
            userReadLock();
            Set<Long> validRoleIds;
            if (userIdentity.isEphemeral()) {
                // ephemeral users have no stored collection; roles must be given explicitly
                Preconditions.checkState(roleIdsSpecified != null,
                        "ephemeral use should always have current role ids specified");
                validRoleIds = roleIdsSpecified;
            } else {
                UserPrivilegeCollectionV2 userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity);
                collection.merge(userPrivilegeCollection);
                validRoleIds = new HashSet<>(userPrivilegeCollection.getAllRoles());
                if (roleIdsSpecified != null) {
                    // restrict to the roles actually activated in this session
                    validRoleIds.retainAll(roleIdsSpecified);
                }
            }
            try {
                roleReadLock();
                // expand to all predecessor roles, then merge each role's privileges
                validRoleIds = getAllPredecessorsUnlocked(validRoleIds);
                for (long roleId : validRoleIds) {
                    RolePrivilegeCollectionV2 rolePrivilegeCollection =
                            getRolePrivilegeCollectionUnlocked(roleId, false);
                    if (rolePrivilegeCollection != null) {
                        collection.merge(rolePrivilegeCollection);
                    }
                }
                // everyone implicitly holds PUBLIC
                RolePrivilegeCollectionV2 rolePrivilegeCollection =
                        getRolePrivilegeCollectionUnlocked(PrivilegeBuiltinConstants.PUBLIC_ROLE_ID, false);
                if (rolePrivilegeCollection != null) {
                    collection.merge(rolePrivilegeCollection);
                }
            } finally {
                roleReadUnlock();
            }
        } finally {
            userReadUnlock();
        }
        return collection;
    }

    /**
     * if the privileges of a role are changed, call this function to invalidate cache
     * requires role lock
     */
    protected void invalidateRolesInCacheRoleUnlocked(long roleId) throws PrivilegeException {
        // every cached entry whose role set touches this role or any of its descendants is stale
        Set<Long> badRoles = getAllDescendantsUnlocked(roleId);
        List<Pair<UserIdentity, Set<Long>>> badKeys = new ArrayList<>();
        for (Pair<UserIdentity, Set<Long>> pair : ctxToMergedPrivilegeCollections.asMap().keySet()) {
            Set<Long> roleIds = pair.second;
            if (roleIds == null) {
                roleIds = getRoleIdsByUser(pair.first);
            }
            for (long badRoleId : badRoles) {
                if (roleIds.contains(badRoleId)) {
                    badKeys.add(pair);
                    break;
                }
            }
        }
        for (Pair<UserIdentity, Set<Long>> pair : badKeys) {
            ctxToMergedPrivilegeCollections.invalidate(pair);
        }
    }

    /**
     * if the privileges of a user are changed, call this function to invalidate cache
     * require not extra lock.
     */
    protected void invalidateUserInCache(UserIdentity userIdentity) {
        List<Pair<UserIdentity, Set<Long>>> badKeys = new ArrayList<>();
        for (Pair<UserIdentity, Set<Long>> pair : ctxToMergedPrivilegeCollections.asMap().keySet()) {
            if (pair.first.equals(userIdentity)) {
                badKeys.add(pair);
            }
        }
        for (Pair<UserIdentity, Set<Long>> pair : badKeys) {
            ctxToMergedPrivilegeCollections.invalidate(pair);
        }
    }

    // Locked lookup; returns null when the user is unknown.
    public UserPrivilegeCollectionV2 getUserPrivilegeCollection(UserIdentity userIdentity) {
        userReadLock();
        try {
            return userToPrivilegeCollection.get(userIdentity);
        } finally {
            userReadUnlock();
        }
    }

    // Unlocked lookup; throws when the user is unknown. Caller must hold userLock.
    public UserPrivilegeCollectionV2 getUserPrivilegeCollectionUnlocked(UserIdentity userIdentity)
            throws PrivilegeException {
        UserPrivilegeCollectionV2 userCollection = userToPrivilegeCollection.get(userIdentity);
        if (userCollection == null) {
            throw new PrivilegeException("cannot find user " +
                    (userIdentity == null ? "null" : userIdentity.toString()));
        }
        return userCollection;
    }

    public List<String> getAllUsers() {
        userReadLock();
        try {
            List<String> users = Lists.newArrayList();
            Set<UserIdentity> userIdentities = userToPrivilegeCollection.keySet();
            for (UserIdentity userIdentity : userIdentities) {
                users.add(userIdentity.toString());
            }
            return users;
        } finally {
            userReadUnlock();
        }
    }

    // NOTE(review): returns a live keySet view of the map — callers see concurrent
    // mutations after the lock is released; confirm whether a copy was intended.
    public Set<UserIdentity> getAllUserIdentities() {
        userReadLock();
        try {
            // NOTE(review): dead local, never used
            List<String> users = Lists.newArrayList();
            Set<UserIdentity> userIdentities = userToPrivilegeCollection.keySet();
            return userIdentities;
        } finally {
            userReadUnlock();
        }
    }

    protected UserPrivilegeCollectionV2 getUserPrivilegeCollectionUnlockedAllowNull(UserIdentity userIdentity) {
        return userToPrivilegeCollection.get(userIdentity);
    }

    // Locked lookup by name; returns null when the role name is unknown.
    public RolePrivilegeCollectionV2 getRolePrivilegeCollection(String roleName) {
        roleReadLock();
        try {
            Long roleId = roleNameToId.get(roleName);
            if (roleId == null) {
                return null;
            }
            return roleIdToPrivilegeCollection.get(roleId);
        } finally {
            roleReadUnlock();
        }
    }

    // Locked lookup by id; returns null when the role id is unknown.
    public RolePrivilegeCollectionV2 getRolePrivilegeCollection(long roleId) {
        roleReadLock();
        try {
            return roleIdToPrivilegeCollection.get(roleId);
        } finally {
            roleReadUnlock();
        }
    }

    // Unlocked lookup; either throws or returns null for a missing role depending on
    // exceptionIfNotExists. Caller must hold roleLock.
    // NOTE(review): message "cannot find role" + roleId is missing a space before the id.
    public RolePrivilegeCollectionV2 getRolePrivilegeCollectionUnlocked(long roleId, boolean exceptionIfNotExists)
            throws PrivilegeException {
        RolePrivilegeCollectionV2 collection = roleIdToPrivilegeCollection.get(roleId);
        if (collection == null) {
            if (exceptionIfNotExists) {
                throw new PrivilegeException("cannot find role" + roleId);
            } else {
                return null;
            }
        }
        return collection;
    }

    // For SHOW GRANTS-style output: returns [roleName, null, "GRANT <parents> TO <role>"]
    // when the role has parent roles, else null. Throws SemanticException for unknown roles.
    public List<String> getGranteeRoleDetailsForRole(String roleName) {
        roleReadLock();
        try {
            Long roleId = getRoleIdByNameAllowNull(roleName);
            if (roleId == null) {
                throw new SemanticException("cannot find role " + roleName);
            }

            RolePrivilegeCollectionV2 rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, true);

            List<String> parentRoleNameList = new ArrayList<>();
            for (Long parentRoleId : rolePrivilegeCollection.getParentRoleIds()) {
                RolePrivilegeCollectionV2 parentRolePriv = getRolePrivilegeCollectionUnlocked(parentRoleId, false);
                if (parentRolePriv != null) {
                    parentRoleNameList.add(parentRolePriv.getName());
                }
            }

            if (!parentRoleNameList.isEmpty()) {
                return Lists.newArrayList(roleName, null,
                        AstToSQLBuilder.toSQL(new GrantRoleStmt(parentRoleNameList, roleName)));
            }
            return null;
        } catch (PrivilegeException e) {
            throw new SemanticException(e.getMessage());
        } finally {
            roleReadUnlock();
        }
    }

    // Returns the raw per-object-type privilege entries of the named role.
    public Map<ObjectType, List<PrivilegeEntry>> getTypeToPrivilegeEntryListByRole(String roleName) {
        roleReadLock();
        try {
            Long roleId = getRoleIdByNameAllowNull(roleName);
            if (roleId == null) {
                throw new SemanticException("cannot find role " + roleName);
            }
            RolePrivilegeCollectionV2 rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, true);
            return rolePrivilegeCollection.getTypeToPrivilegeEntryList();
        } catch (PrivilegeException e) {
            throw new SemanticException(e.getMessage());
        } finally {
            roleReadUnlock();
        }
    }

    public List<String>
getGranteeRoleDetailsForUser(UserIdentity userIdentity) { userReadLock(); try { UserPrivilegeCollectionV2 userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity); Set<Long> allRoles = userPrivilegeCollection.getAllRoles(); roleReadLock(); try { List<String> parentRoleNameList = new ArrayList<>(); for (Long roleId : allRoles) { RolePrivilegeCollectionV2 parentRolePriv = getRolePrivilegeCollectionUnlocked(roleId, false); if (parentRolePriv != null) { parentRoleNameList.add(parentRolePriv.getName()); } } if (!parentRoleNameList.isEmpty()) { return Lists.newArrayList(userIdentity.toString(), null, AstToSQLBuilder.toSQL(new GrantRoleStmt(parentRoleNameList, userIdentity))); } return null; } finally { roleReadUnlock(); } } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { userReadUnlock(); } } public Map<ObjectType, List<PrivilegeEntry>> getTypeToPrivilegeEntryListByUser( UserIdentity userIdentity) { userReadLock(); try { UserPrivilegeCollectionV2 userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity); return userPrivilegeCollection.getTypeToPrivilegeEntryList(); } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { userReadUnlock(); } } public Map<ObjectType, List<PrivilegeEntry>> getMergedTypeToPrivilegeEntryListByUser( UserIdentity userIdentity) { userReadLock(); try { UserPrivilegeCollectionV2 userPrivilegeCollection = getUserPrivilegeCollectionUnlocked(userIdentity); PrivilegeCollectionV2 collection = mergePrivilegeCollection(userIdentity, userPrivilegeCollection.getAllRoles()); return collection.getTypeToPrivilegeEntryList(); } catch (PrivilegeException e) { throw new SemanticException(e.getMessage()); } finally { userReadUnlock(); } } public List<String> getAllRoles() { roleReadLock(); try { List<String> roles = new ArrayList<>(); for (RolePrivilegeCollectionV2 rolePrivilegeCollection : roleIdToPrivilegeCollection.values()) { 
roles.add(rolePrivilegeCollection.getName()); } return roles; } finally { roleReadUnlock(); } } public List<PrivilegeType> analyzeActionSet(ObjectType objectType, ActionSet actionSet) { List<PrivilegeType> privilegeTypes = provider.getAvailablePrivType(objectType); List<PrivilegeType> actions = new ArrayList<>(); for (PrivilegeType actionName : privilegeTypes) { if (actionSet.contains(actionName)) { actions.add(actionName); } } return actions; } public boolean isAvailablePrivType(ObjectType objectType, PrivilegeType privilegeType) { return provider.isAvailablePrivType(objectType, privilegeType); } public List<PrivilegeType> getAvailablePrivType(ObjectType objectType) { return provider.getAvailablePrivType(objectType); } public void createRole(CreateRoleStmt stmt) { roleWriteLock(); try { Map<String, Long> roleNameToBeCreated = new HashMap<>(); Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new HashMap<>(); for (String roleName : stmt.getRoles()) { if (roleNameToId.containsKey(roleName)) { LOG.info("Operation CREATE ROLE failed for " + roleName + " : role " + roleName + " already exists"); return; } long roleId = globalStateMgr.getNextId(); RolePrivilegeCollectionV2 collection = new RolePrivilegeCollectionV2( roleName, stmt.getComment(), RolePrivilegeCollectionV2.RoleFlags.REMOVABLE, RolePrivilegeCollectionV2.RoleFlags.MUTABLE); rolePrivCollectionModified.put(roleId, collection); roleNameToBeCreated.put(roleName, roleId); } roleIdToPrivilegeCollection.putAll(rolePrivCollectionModified); roleNameToId.putAll(roleNameToBeCreated); globalStateMgr.getEditLog().logUpdateRolePrivilege( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); LOG.info("created role {}[{}]", stmt.getRoles().toString(), roleNameToBeCreated.values()); } finally { roleWriteUnlock(); } } public void alterRole(AlterRoleStmt stmt) throws DdlException { try { roleWriteLock(); Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new 
HashMap<>(); for (String roleName : stmt.getRoles()) { if (!roleNameToId.containsKey(roleName)) { throw new DdlException(roleName + " doesn't exist"); } long roleId = roleNameToId.get(roleName); RolePrivilegeCollectionV2 rolePrivilegeCollection = roleIdToPrivilegeCollection.get(roleId); Preconditions.checkNotNull(rolePrivilegeCollection); if (!rolePrivilegeCollection.isMutable()) { throw new DdlException(roleName + " is immutable"); } rolePrivCollectionModified.put(roleId, rolePrivilegeCollection); } rolePrivCollectionModified.values().forEach( rolePrivilegeCollection -> rolePrivilegeCollection.setComment(stmt.getComment())); globalStateMgr.getEditLog().logUpdateRolePrivilege( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); } finally { roleWriteUnlock(); } } public void replayUpdateRolePrivilegeCollection( RolePrivilegeCollectionInfo info) throws PrivilegeException { roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollectionV2> entry : info.getRolePrivCollectionModified().entrySet()) { long roleId = entry.getKey(); invalidateRolesInCacheRoleUnlocked(roleId); RolePrivilegeCollectionV2 privilegeCollection = entry.getValue(); provider.upgradePrivilegeCollection(privilegeCollection, info.getPluginId(), info.getPluginVersion()); roleIdToPrivilegeCollection.put(roleId, privilegeCollection); if (!roleNameToId.containsKey(privilegeCollection.getName())) { roleNameToId.put(privilegeCollection.getName(), roleId); } LOG.info("replayed update role {}", roleId); } } finally { roleWriteUnlock(); } } public void dropRole(DropRoleStmt stmt) throws DdlException { roleWriteLock(); try { List<String> roleNameToBeDropped = new ArrayList<>(); Map<Long, RolePrivilegeCollectionV2> rolePrivCollectionModified = new HashMap<>(); for (String roleName : stmt.getRoles()) { if (!roleNameToId.containsKey(roleName)) { LOG.info("Operation DROP ROLE failed for " + roleName + " : role " + roleName + " not exists"); return; } long roleId = 
getRoleIdByNameNoLock(roleName); RolePrivilegeCollectionV2 collection = roleIdToPrivilegeCollection.get(roleId); if (!collection.isRemovable()) { throw new DdlException("role " + roleName + " cannot be dropped!"); } roleNameToBeDropped.add(roleName); rolePrivCollectionModified.put(roleId, collection); invalidateRolesInCacheRoleUnlocked(roleId); } roleIdToPrivilegeCollection.keySet().removeAll(rolePrivCollectionModified.keySet()); roleNameToBeDropped.forEach(roleNameToId.keySet()::remove); globalStateMgr.getEditLog().logDropRole( rolePrivCollectionModified, provider.getPluginId(), provider.getPluginVersion()); LOG.info("dropped role {}[{}]", stmt.getRoles().toString(), rolePrivCollectionModified.keySet().toString()); } catch (PrivilegeException e) { throw new DdlException("failed to drop role: " + e.getMessage(), e); } finally { roleWriteUnlock(); } } public void replayDropRole( RolePrivilegeCollectionInfo info) throws PrivilegeException { roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollectionV2> entry : info.getRolePrivCollectionModified().entrySet()) { long roleId = entry.getKey(); invalidateRolesInCacheRoleUnlocked(roleId); RolePrivilegeCollectionV2 privilegeCollection = entry.getValue(); provider.upgradePrivilegeCollection(privilegeCollection, info.getPluginId(), info.getPluginVersion()); roleIdToPrivilegeCollection.remove(roleId); roleNameToId.remove(privilegeCollection.getName()); LOG.info("replayed drop role {}", roleId); } } finally { roleWriteUnlock(); } } public boolean checkRoleExists(String name) { roleReadLock(); try { return roleNameToId.containsKey(name); } finally { roleReadUnlock(); } } public boolean isBuiltinRole(String name) { return PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_NAMES.contains(name); } public String getRoleComment(String name) { try { roleReadLock(); String result = FeConstants.NULL_STRING; Long roleId = roleNameToId.get(name); if (roleId != null) { String comment = 
roleIdToPrivilegeCollection.get(roleId).getComment(); if (!Strings.isNullOrEmpty(comment)) { result = comment; } } return result; } finally { roleReadUnlock(); } } public Set<Long> getRoleIdsByUser(UserIdentity user) throws PrivilegeException { userReadLock(); try { Set<Long> ret = new HashSet<>(); roleReadLock(); try { for (long roleId : getUserPrivilegeCollectionUnlocked(user).getAllRoles()) { if (getRolePrivilegeCollectionUnlocked(roleId, false) != null) { ret.add(roleId); } } return ret; } finally { roleReadUnlock(); } } finally { userReadUnlock(); } } public Set<Long> getDefaultRoleIdsByUser(UserIdentity user) throws PrivilegeException { userReadLock(); try { Set<Long> ret = new HashSet<>(); roleReadLock(); try { for (long roleId : getUserPrivilegeCollectionUnlocked(user).getDefaultRoleIds()) { if (getRolePrivilegeCollectionUnlocked(roleId, false) != null) { ret.add(roleId); } } return ret; } finally { roleReadUnlock(); } } finally { userReadUnlock(); } } public void setUserDefaultRole(Set<Long> roleName, UserIdentity user) throws PrivilegeException { userWriteLock(); try { UserPrivilegeCollectionV2 collection = getUserPrivilegeCollectionUnlocked(user); roleReadLock(); try { collection.setDefaultRoleIds(roleName); } finally { roleReadUnlock(); } globalStateMgr.getEditLog().logUpdateUserPrivilege( user, collection, provider.getPluginId(), provider.getPluginVersion()); LOG.info("grant role {} to user {}", roleName, user); } finally { userWriteUnlock(); } } public List<String> getRoleNamesByUser(UserIdentity user) throws PrivilegeException { try { userReadLock(); List<String> roleNameList = Lists.newArrayList(); try { roleReadLock(); for (long roleId : getUserPrivilegeCollectionUnlocked(user).getAllRoles()) { RolePrivilegeCollectionV2 rolePrivilegeCollection = getRolePrivilegeCollectionUnlocked(roleId, false); if (rolePrivilegeCollection != null) { roleNameList.add(rolePrivilegeCollection.getName()); } } return roleNameList; } finally { roleReadUnlock(); } } 
finally { userReadUnlock(); } } public Long getRoleIdByNameAllowNull(String name) { roleReadLock(); try { return roleNameToId.get(name); } finally { roleReadUnlock(); } } protected Long getRoleIdByNameNoLock(String name) throws PrivilegeException { Long roleId = roleNameToId.get(name); if (roleId == null) { throw new PrivilegeException(String.format("Role %s doesn't exist!", name)); } return roleId; } public PEntryObject generateObject(ObjectType objectType, List<String> objectTokenList) throws PrivilegeException { if (objectTokenList == null) { return null; } return this.provider.generateObject(objectType, objectTokenList, globalStateMgr); } public PEntryObject generateUserObject(ObjectType objectType, UserIdentity user) throws PrivilegeException { return this.provider.generateUserObject(objectType, user, globalStateMgr); } public PEntryObject generateFunctionObject(ObjectType objectType, Long databaseId, Long functionId) throws PrivilegeException { return this.provider.generateFunctionObject(objectType, databaseId, functionId, globalStateMgr); } /** * remove invalid object periodically * <p> * lock order should always be: * AuthenticationManager.lock -> AuthorizationManager.userLock -> AuthorizationManager.roleLock */ public void removeInvalidObject() { userWriteLock(); try { for (Map.Entry<UserIdentity, UserPrivilegeCollectionV2> userPrivEntry : userToPrivilegeCollection.entrySet()) { userPrivEntry.getValue().removeInvalidObject(globalStateMgr); } roleReadLock(); try { for (Map.Entry<UserIdentity, UserPrivilegeCollectionV2> userPrivEntry : userToPrivilegeCollection.entrySet()) { removeInvalidRolesUnlocked(userPrivEntry.getValue().getAllRoles()); removeInvalidRolesUnlocked(userPrivEntry.getValue().getDefaultRoleIds()); } } finally { roleReadUnlock(); } } finally { userWriteUnlock(); } userReadLock(); try { roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollectionV2> rolePrivEntry : roleIdToPrivilegeCollection.entrySet()) { 
rolePrivEntry.getValue().removeInvalidObject(globalStateMgr); } } finally { roleWriteUnlock(); } } finally { userReadUnlock(); } roleWriteLock(); try { for (Map.Entry<Long, RolePrivilegeCollectionV2> rolePrivEntry : roleIdToPrivilegeCollection.entrySet()) { RolePrivilegeCollectionV2 collection = rolePrivEntry.getValue(); removeInvalidRolesUnlocked(collection.getParentRoleIds()); removeInvalidRolesUnlocked(collection.getSubRoleIds()); } } finally { roleWriteUnlock(); } } private void removeInvalidRolesUnlocked(Set<Long> roleIds) { roleIds.removeIf(aLong -> !roleIdToPrivilegeCollection.containsKey(aLong)); } /** * get max role inheritance depth * e.g. grant role_a to role role_b; grant role_b to role role_c; * then the inheritance graph would be role_a -> role_b -> role_c * the role inheritance depth for role_a would be 2, for role_b would be 1, for role_c would be 0 */ protected long getMaxRoleInheritanceDepthInner(long currentDepth, long roleId) throws PrivilegeException { RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, false); if (collection == null) { return currentDepth - 1; } Set<Long> subRoleIds = collection.getSubRoleIds(); if (subRoleIds.isEmpty()) { return currentDepth; } else { long maxDepth = -1; for (long subRoleId : subRoleIds) { maxDepth = Math.max(maxDepth, getMaxRoleInheritanceDepthInner(currentDepth + 1, subRoleId)); } return maxDepth; } } /** * get all descendants roles(sub roles and their subs etc.) * e.g. 
grant role_a to role role_b; grant role_b to role role_c; * then the inheritance graph would be role_a -> role_b -> role_c * then all descendants roles of role_a would be [role_b, role_c] */ protected Set<Long> getAllDescendantsUnlocked(long roleId) throws PrivilegeException { Set<Long> set = new HashSet<>(); set.add(roleId); getAllDescendantsUnlockedInner(roleId, set); return set; } protected void getAllDescendantsUnlockedInner(long roleId, Set<Long> resultSet) throws PrivilegeException { RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, false); if (collection == null) { return; } for (Long subId : collection.getSubRoleIds()) { if (!resultSet.contains(subId)) { resultSet.add(subId); getAllDescendantsUnlockedInner(subId, resultSet); } } } /** * get all predecessors roles (parent roles and their parents etc.) * e.g. grant role_a to role role_b; grant role_b to role role_c; * then the inheritance graph would be role_a -> role_b -> role_c * then all parent roles of role_c would be [role_a, role_b] */ protected Set<Long> getAllPredecessorsUnlocked(UserPrivilegeCollectionV2 collection) throws PrivilegeException { return getAllPredecessorsUnlocked(collection.getAllRoles()); } protected Set<Long> getAllPredecessorsUnlocked(long roleId) throws PrivilegeException { Set<Long> set = new HashSet<>(); set.add(roleId); return getAllPredecessorsUnlocked(set); } protected Set<Long> getAllPredecessorsUnlocked(Set<Long> initialRoleIds) throws PrivilegeException { Set<Long> result = new HashSet<>(initialRoleIds); for (long roleId : initialRoleIds) { getAllPredecessorsInner(roleId, result); } return result; } protected void getAllPredecessorsInner(long roleId, Set<Long> resultSet) throws PrivilegeException { RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(roleId, false); if (collection == null) { resultSet.remove(roleId); return; } for (Long parentId : collection.getParentRoleIds()) { if (!resultSet.contains(parentId)) { 
resultSet.add(parentId); getAllPredecessorsInner(parentId, resultSet); } } } /** * Use new image format by SRMetaBlockWriter/SRMetaBlockReader * +------------------+ * | header | * +------------------+ * | | * | Authorization- | * | Manager | * | | * +------------------+ * | numUser | * +------------------+ * | User | * | Privilege | * | Collection 1 | * +------------------+ * | User | * | Privilege | * | Collection 2 | * +------------------+ * | ... | * +------------------+ * | numRole | * +------------------+ * | Role | * | Privilege | * | Collection 1 | * +------------------+ * | Role | * | Privilege | * | Collection 1 | * +------------------+ * | ... | * +------------------+ * | footer | * +------------------+ */ public void save(DataOutputStream dos) throws IOException { try { final int cnt = 1 + 1 + userToPrivilegeCollection.size() * 2 + 1 + roleIdToPrivilegeCollection.size() * 2; SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, "com.starrocks.privilege.AuthorizationManager", cnt); writer.writeJson(this); writer.writeJson(userToPrivilegeCollection.size()); Iterator<Map.Entry<UserIdentity, UserPrivilegeCollectionV2>> iterator = userToPrivilegeCollection.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<UserIdentity, UserPrivilegeCollectionV2> entry = iterator.next(); writer.writeJson(entry.getKey()); UserPrivilegeCollectionV2 userPrivilegeCollection = entry.getValue(); UserPrivilegeCollection userPrivilegeCollectionDeprecate = new UserPrivilegeCollection(); userPrivilegeCollectionDeprecate.grantRoles(userPrivilegeCollection.getAllRoles()); userPrivilegeCollectionDeprecate.setDefaultRoleIds(userPrivilegeCollection.getDefaultRoleIds()); Map<ObjectType, List<PrivilegeEntry>> m = userPrivilegeCollection.getTypeToPrivilegeEntryList(); for (Map.Entry<ObjectType, List<PrivilegeEntry>> e : m.entrySet()) { userPrivilegeCollectionDeprecate.getTypeToPrivilegeEntryList() .put(ObjectTypeDeprecate.toObjectTypeDeprecate(e.getKey()), e.getValue()); } 
writer.writeJson(userPrivilegeCollectionDeprecate); } writer.writeJson(roleIdToPrivilegeCollection.size()); Iterator<Map.Entry<Long, RolePrivilegeCollectionV2>> roleIter = roleIdToPrivilegeCollection.entrySet().iterator(); while (roleIter.hasNext()) { Map.Entry<Long, RolePrivilegeCollectionV2> entry = roleIter.next(); writer.writeJson(entry.getKey()); RolePrivilegeCollectionV2 rolePrivilegeCollection = entry.getValue(); RolePrivilegeCollection rolePrivilegeCollectionDeprecate; if (rolePrivilegeCollection.isRemovable() && rolePrivilegeCollection.isMutable()) { rolePrivilegeCollectionDeprecate = new RolePrivilegeCollection(rolePrivilegeCollection.getName(), rolePrivilegeCollection.getComment(), RolePrivilegeCollection.RoleFlags.REMOVABLE, RolePrivilegeCollection.RoleFlags.MUTABLE); } else if (rolePrivilegeCollection.isRemovable()) { rolePrivilegeCollectionDeprecate = new RolePrivilegeCollection(rolePrivilegeCollection.getName(), rolePrivilegeCollection.getComment(), RolePrivilegeCollection.RoleFlags.REMOVABLE); } else if (rolePrivilegeCollection.isMutable()) { rolePrivilegeCollectionDeprecate = new RolePrivilegeCollection(rolePrivilegeCollection.getName(), rolePrivilegeCollection.getComment(), RolePrivilegeCollection.RoleFlags.MUTABLE); } else { rolePrivilegeCollectionDeprecate = new RolePrivilegeCollection(rolePrivilegeCollection.getName(), rolePrivilegeCollection.getComment()); } for (Long r : rolePrivilegeCollection.getParentRoleIds()) { rolePrivilegeCollectionDeprecate.addParentRole(r); } for (Long r : rolePrivilegeCollection.getSubRoleIds()) { rolePrivilegeCollectionDeprecate.addSubRole(r); } Map<ObjectType, List<PrivilegeEntry>> m = rolePrivilegeCollection.getTypeToPrivilegeEntryList(); for (Map.Entry<ObjectType, List<PrivilegeEntry>> e : m.entrySet()) { rolePrivilegeCollectionDeprecate.getTypeToPrivilegeEntryList() .put(ObjectTypeDeprecate.toObjectTypeDeprecate(e.getKey()), e.getValue()); } writer.writeJson(rolePrivilegeCollectionDeprecate); } writer.close(); 
} catch (SRMetaBlockException e) { throw new IOException("failed to save AuthenticationManager!", e); } catch (PrivilegeException e) { throw new RuntimeException(e); } } public static AuthorizationMgr load( DataInputStream dis, GlobalStateMgr globalStateMgr, AuthorizationProvider provider) throws IOException, DdlException { try { SRMetaBlockReader reader = new SRMetaBlockReader(dis, "com.starrocks.privilege.AuthorizationManager"); AuthorizationMgr ret = null; try { ret = reader.readJson(AuthorizationMgr.class); ret.globalStateMgr = globalStateMgr; if (provider == null) { ret.provider = new DefaultAuthorizationProvider(); } else { ret.provider = provider; } ret.initBuiltinRolesAndUsers(); int numUser = reader.readJson(int.class); LOG.info("loading {} users", numUser); for (int i = 0; i != numUser; ++i) { UserIdentity userIdentity = reader.readJson(UserIdentity.class); UserPrivilegeCollection collectionDeprecate = reader.readJson(UserPrivilegeCollection.class); UserPrivilegeCollectionV2 collection = new UserPrivilegeCollectionV2(); collection.grantRoles(collectionDeprecate.getAllRoles()); collection.setDefaultRoleIds(collectionDeprecate.getDefaultRoleIds()); Map<ObjectTypeDeprecate, List<PrivilegeEntry>> m = collectionDeprecate.getTypeToPrivilegeEntryList(); for (Map.Entry<ObjectTypeDeprecate, List<PrivilegeEntry>> e : m.entrySet()) { collection.getTypeToPrivilegeEntryList().put(e.getKey().toObjectType(), e.getValue()); } if (userIdentity.equals(UserIdentity.ROOT)) { UserPrivilegeCollectionV2 rootUserPrivCollection = ret.getUserPrivilegeCollectionUnlocked(UserIdentity.ROOT); collection.grantRoles(rootUserPrivCollection.getAllRoles()); collection.setDefaultRoleIds(rootUserPrivCollection.getDefaultRoleIds()); collection.typeToPrivilegeEntryList = rootUserPrivCollection.typeToPrivilegeEntryList; } ret.provider.upgradePrivilegeCollection(collection, ret.pluginId, ret.pluginVersion); ret.userToPrivilegeCollection.put(userIdentity, collection); } int numRole = 
reader.readJson(int.class); LOG.info("loading {} roles", numRole); for (int i = 0; i != numRole; ++i) { Long roleId = reader.readJson(Long.class); RolePrivilegeCollection collectionDeprecate = reader.readJson(RolePrivilegeCollection.class); RolePrivilegeCollectionV2 rolePrivilegeCollection = null; if (collectionDeprecate.isRemovable() && collectionDeprecate.isMutable()) { rolePrivilegeCollection = new RolePrivilegeCollectionV2(collectionDeprecate.getName(), collectionDeprecate.getComment(), RolePrivilegeCollectionV2.RoleFlags.REMOVABLE, RolePrivilegeCollectionV2.RoleFlags.MUTABLE); } else if (collectionDeprecate.isRemovable()) { rolePrivilegeCollection = new RolePrivilegeCollectionV2(collectionDeprecate.getName(), collectionDeprecate.getComment(), RolePrivilegeCollectionV2.RoleFlags.REMOVABLE); } else if (collectionDeprecate.isMutable()) { rolePrivilegeCollection = new RolePrivilegeCollectionV2(collectionDeprecate.getName(), collectionDeprecate.getComment(), RolePrivilegeCollectionV2.RoleFlags.MUTABLE); } else { rolePrivilegeCollection = new RolePrivilegeCollectionV2(collectionDeprecate.getName(), collectionDeprecate.getComment()); } for (Long r : collectionDeprecate.getParentRoleIds()) { rolePrivilegeCollection.addParentRole(r); } for (Long r : collectionDeprecate.getSubRoleIds()) { rolePrivilegeCollection.addSubRole(r); } if (PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_IDS.contains(roleId)) { RolePrivilegeCollectionV2 builtInRolePrivilegeCollection = ret.roleIdToPrivilegeCollection.get(roleId); rolePrivilegeCollection.typeToPrivilegeEntryList = builtInRolePrivilegeCollection.typeToPrivilegeEntryList; } else { Map<ObjectTypeDeprecate, List<PrivilegeEntry>> m = collectionDeprecate.getTypeToPrivilegeEntryList(); for (Map.Entry<ObjectTypeDeprecate, List<PrivilegeEntry>> e : m.entrySet()) { rolePrivilegeCollection.getTypeToPrivilegeEntryList().put(e.getKey().toObjectType(), e.getValue()); } } ret.provider.upgradePrivilegeCollection(rolePrivilegeCollection, 
ret.pluginId, ret.pluginVersion); ret.roleIdToPrivilegeCollection.put(roleId, rolePrivilegeCollection); } } catch (SRMetaBlockEOFException eofException) { LOG.warn("got EOF exception, ignore, ", eofException); } finally { reader.close(); } assert ret != null; LOG.info("loaded {} users, {} roles", ret.userToPrivilegeCollection.size(), ret.roleIdToPrivilegeCollection.size()); ret.isLoaded = true; return ret; } catch (SRMetaBlockException | PrivilegeException e) { throw new DdlException("failed to load AuthorizationManager!", e); } } public boolean isLoaded() { return isLoaded; } public void setLoaded(boolean loaded) { isLoaded = loaded; } /** * these public interfaces are for AuthUpgrader to upgrade from 2.x */ public void upgradeUserInitPrivilegeUnlock(UserIdentity userIdentity, UserPrivilegeCollectionV2 collection) { userToPrivilegeCollection.put(userIdentity, collection); LOG.info("upgrade user {}", userIdentity); } public void upgradeRoleInitPrivilegeUnlock(long roleId, RolePrivilegeCollectionV2 collection) { roleIdToPrivilegeCollection.put(roleId, collection); roleNameToId.put(collection.getName(), roleId); LOG.info("upgrade role {}[{}]", collection.getName(), roleId); } public void grantStorageVolumeUsageToPublicRole(String storageVolumeId) throws PrivilegeException { roleWriteLock(); try { RolePrivilegeCollectionV2 collection = getRolePrivilegeCollectionUnlocked(PrivilegeBuiltinConstants.PUBLIC_ROLE_ID, true); List<PEntryObject> object = Collections.singletonList(new StorageVolumePEntryObject(storageVolumeId)); collection.grant(ObjectType.STORAGE_VOLUME, Collections.singletonList(PrivilegeType.USAGE), object, false); } finally { roleWriteUnlock(); } } public void loadV2(SRMetaBlockReader reader) throws IOException, SRMetaBlockException, SRMetaBlockEOFException { AuthorizationMgr ret = null; try { ret = reader.readJson(AuthorizationMgr.class); ret.globalStateMgr = globalStateMgr; if (provider == null) { ret.provider = new DefaultAuthorizationProvider(); } else 
{ ret.provider = provider; } ret.initBuiltinRolesAndUsers(); int numUser = reader.readJson(int.class); LOG.info("loading {} users", numUser); for (int i = 0; i != numUser; ++i) { UserIdentity userIdentity = reader.readJson(UserIdentity.class); UserPrivilegeCollectionV2 collection = reader.readJson(UserPrivilegeCollectionV2.class); if (userIdentity.equals(UserIdentity.ROOT)) { UserPrivilegeCollectionV2 rootUserPrivCollection = ret.getUserPrivilegeCollectionUnlocked(UserIdentity.ROOT); collection.grantRoles(rootUserPrivCollection.getAllRoles()); collection.setDefaultRoleIds(rootUserPrivCollection.getDefaultRoleIds()); collection.typeToPrivilegeEntryList = rootUserPrivCollection.typeToPrivilegeEntryList; } ret.userToPrivilegeCollection.put(userIdentity, collection); } int numRole = reader.readJson(int.class); LOG.info("loading {} roles", numRole); for (int i = 0; i != numRole; ++i) { Long roleId = reader.readJson(Long.class); RolePrivilegeCollectionV2 collection = reader.readJson(RolePrivilegeCollectionV2.class); if (PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_IDS.contains(roleId)) { RolePrivilegeCollectionV2 builtInRolePrivilegeCollection = ret.roleIdToPrivilegeCollection.get(roleId); collection.typeToPrivilegeEntryList = builtInRolePrivilegeCollection.typeToPrivilegeEntryList; } ret.roleIdToPrivilegeCollection.put(roleId, collection); } assert ret != null; LOG.info("loaded {} users, {} roles", ret.userToPrivilegeCollection.size(), ret.roleIdToPrivilegeCollection.size()); isLoaded = true; userToPrivilegeCollection = ret.userToPrivilegeCollection; roleIdToPrivilegeCollection = ret.roleIdToPrivilegeCollection; } catch (PrivilegeException e) { throw new IOException("failed to load AuthorizationManager!", e); } } }
> List<ScalarOperator> partitions = new ArrayList<>(); for (Expr partitionExpression : windowOperator.getPartitionExprs()) { ScalarOperator operator = SqlToScalarOperatorTranslator .translate(partitionExpression, subOpt.getExpressionMapping(), columnRefFactory); partitions.add(operator); } List<Ordering> orderings = new ArrayList<>(); for (OrderByElement orderByElement : windowOperator.getOrderByElements()) { ColumnRefOperator col = (ColumnRefOperator) SqlToScalarOperatorTranslator .translate(orderByElement.getExpr(), subOpt.getExpressionMapping(), columnRefFactory); orderings.add(new Ordering(col, orderByElement.getIsAsc(), OrderByElement.nullsFirst(orderByElement.getNullsFirstParam()))); } The shortcut is partly an optimization, but its main purpose is to reduce the number of SqlToScalarOperatorTranslator.translate calls made in the reorderWindowOperator method. @packy92 @LiShuMing
/**
 * Attaches the window (analytic function) operators for this query block on top of the
 * current plan builder {@code subOpt}.
 *
 * Steps:
 *   1. Collect every partition-by and order-by expression used by the analytic calls.
 *   2. If any of them is not already a plain column reference, insert a ProjectOperator
 *      that evaluates them; otherwise just record the expression-to-column mapping
 *      (fast path, avoids an extra project node).
 *   3. Standardize each analytic call and merge calls that standardize to an equal
 *      window definition, then stack the resulting LogicalWindowOperators on the plan.
 *
 * @param subOpt current plan builder; its expression mapping may be replaced in place
 * @param window analytic expressions from both the output and order-by clauses
 * @return builder with the window operators (and possibly a project) on top
 */
private OptExprBuilder window(OptExprBuilder subOpt, List<AnalyticExpr> window) {
    if (window.isEmpty()) {
        return subOpt;
    }

    /*
     * Build ProjectOperator of partition expression and order by expression in window function.
     */
    List<Expr> projectExpressions = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        projectExpressions.addAll(analyticExpr.getPartitionExprs());
        projectExpressions.addAll(analyticExpr.getOrderByElements()
                .stream().map(OrderByElement::getExpr).collect(Collectors.toList()));
    }

    final ExpressionMapping expressionMapping = subOpt.getExpressionMapping();
    // Probe whether every partition/order-by expression translates to a bare column ref.
    // tempMapping buffers the translations so they are only published on full success.
    boolean allColumnRef = true;
    Map<Expr, ColumnRefOperator> tempMapping = new HashMap<>();
    for (Expr expression : projectExpressions) {
        ScalarOperator operator =
                SqlToScalarOperatorTranslator.translate(expression, expressionMapping, columnRefFactory);
        if (!operator.isColumnRef()) {
            allColumnRef = false;
            tempMapping.clear();
            break;
        } else {
            tempMapping.put(expression, (ColumnRefOperator) operator);
        }
    }
    if (allColumnRef) {
        // Fast path: no computation needed, just make the expressions resolvable by column.
        expressionMapping.getExpressionToColumns().putAll(tempMapping);
    }

    /*
     * If there is no expression calculate in partition and order by,
     * there is no need to add ProjectOperator here
     */
    if (!allColumnRef) {
        ExpressionMapping outputTranslations = new ExpressionMapping(subOpt.getScope());
        List<ColumnRefOperator> fieldMappings = new ArrayList<>();
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        // Re-expose every existing field under a fresh column ref so the new project node
        // passes the child's output through unchanged.
        for (ColumnRefOperator expression : subOpt.getFieldMappings()) {
            ColumnRefOperator variable =
                    columnRefFactory.create(expression, expression.getType(), expression.isNullable());
            projections.put(variable, expression);
            fieldMappings.add(variable);
        }
        outputTranslations.setFieldMappings(fieldMappings);
        // Carry over all previously-known expressions, then add the window's own
        // partition/order-by expressions. findOrCreateColumnRefForExpr is an external
        // helper; presumably it reuses an existing projection entry when one matches —
        // TODO confirm against its definition.
        for (Expr expression : subOpt.getExpressionMapping().getAllExpressions()) {
            ColumnRefOperator columnRef = findOrCreateColumnRefForExpr(expression,
                    subOpt.getExpressionMapping(), projections, columnRefFactory);
            outputTranslations.put(expression, columnRef);
        }
        for (Expr expression : projectExpressions) {
            ColumnRefOperator columnRef = findOrCreateColumnRefForExpr(expression,
                    subOpt.getExpressionMapping(), projections, columnRefFactory);
            outputTranslations.put(expression, columnRef);
        }
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections);
        subOpt.setExpressionMapping(outputTranslations);
        subOpt = subOpt.withNewRoot(projectOperator);
    }

    /*
     * If necessary, rewrites the analytic function, window, and/or order-by elements
     * into a standard format for the purpose of simpler backend execution
     */
    List<WindowTransformer.WindowOperator> windowOperators = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        WindowTransformer.WindowOperator rewriteOperator = WindowTransformer.standardize(analyticExpr);
        if (windowOperators.contains(rewriteOperator)) {
            // Same window definition already seen: merge this call into it instead of
            // creating a second operator.
            WindowTransformer.WindowOperator windowOperator =
                    windowOperators.get(windowOperators.indexOf(rewriteOperator));
            if (rewriteOperator.isSkewed()) {
                windowOperator.setSkewed();
            }
            windowOperator.addFunction(analyticExpr);
        } else {
            windowOperators.add(rewriteOperator);
        }
    }

    List<LogicalWindowOperator> logicalWindowOperators =
            WindowTransformer.reorderWindowOperator(windowOperators, columnRefFactory, subOpt);
    for (LogicalWindowOperator logicalWindowOperator : logicalWindowOperators) {
        subOpt = subOpt.withNewRoot(logicalWindowOperator);
    }
    return subOpt;
}
}
/**
 * Adds the window (analytic) operators of this query block to the plan rooted at
 * {@code subOpt}. Partition-by and order-by expressions that are not plain columns are
 * first materialized through a ProjectOperator; analytic calls sharing an equal
 * standardized window definition are merged into a single operator.
 *
 * @param subOpt current plan builder; its expression mapping may be replaced
 * @param window analytic expressions collected from the output and order-by clauses
 * @return builder with the window operators (and possibly a project node) stacked on top
 */
private OptExprBuilder window(OptExprBuilder subOpt, List<AnalyticExpr> window) {
    if (window.isEmpty()) {
        return subOpt;
    }

    // Gather every expression the window operators will need from their input.
    List<Expr> neededExprs = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        neededExprs.addAll(analyticExpr.getPartitionExprs());
        for (OrderByElement orderByElement : analyticExpr.getOrderByElements()) {
            neededExprs.add(orderByElement.getExpr());
        }
    }

    // Try the cheap route first: if each needed expression already resolves to a bare
    // column reference, no projection is required. The translations are buffered and
    // only published when all of them succeed.
    final ExpressionMapping currentMapping = subOpt.getExpressionMapping();
    Map<Expr, ColumnRefOperator> resolved = new HashMap<>();
    boolean onlyColumnRefs = true;
    for (Expr expr : neededExprs) {
        ScalarOperator translated =
                SqlToScalarOperatorTranslator.translate(expr, currentMapping, columnRefFactory);
        if (translated.isColumnRef()) {
            resolved.put(expr, (ColumnRefOperator) translated);
        } else {
            onlyColumnRefs = false;
            resolved.clear();
            break;
        }
    }

    if (onlyColumnRefs) {
        currentMapping.getExpressionToColumns().putAll(resolved);
    } else {
        // At least one expression must be computed: insert a project node that forwards
        // every existing field and evaluates the needed expressions.
        ExpressionMapping newTranslations = new ExpressionMapping(subOpt.getScope());
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        List<ColumnRefOperator> forwardedFields = new ArrayList<>();
        for (ColumnRefOperator field : subOpt.getFieldMappings()) {
            ColumnRefOperator alias = columnRefFactory.create(field, field.getType(), field.isNullable());
            projections.put(alias, field);
            forwardedFields.add(alias);
        }
        newTranslations.setFieldMappings(forwardedFields);

        for (Expr expr : subOpt.getExpressionMapping().getAllExpressions()) {
            newTranslations.put(expr, findOrCreateColumnRefForExpr(expr,
                    subOpt.getExpressionMapping(), projections, columnRefFactory));
        }
        for (Expr expr : neededExprs) {
            newTranslations.put(expr, findOrCreateColumnRefForExpr(expr,
                    subOpt.getExpressionMapping(), projections, columnRefFactory));
        }

        subOpt.setExpressionMapping(newTranslations);
        subOpt = subOpt.withNewRoot(new LogicalProjectOperator(projections));
    }

    // Standardize each analytic call; calls whose standardized window definitions are
    // equal are folded into one operator (propagating the skew hint).
    List<WindowTransformer.WindowOperator> standardized = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        WindowTransformer.WindowOperator candidate = WindowTransformer.standardize(analyticExpr);
        int existingIdx = standardized.indexOf(candidate);
        if (existingIdx >= 0) {
            WindowTransformer.WindowOperator existing = standardized.get(existingIdx);
            if (candidate.isSkewed()) {
                existing.setSkewed();
            }
            existing.addFunction(analyticExpr);
        } else {
            standardized.add(candidate);
        }
    }

    // Reorder for execution and stack the resulting logical window operators.
    for (LogicalWindowOperator windowOp
            : WindowTransformer.reorderWindowOperator(standardized, columnRefFactory, subOpt)) {
        subOpt = subOpt.withNewRoot(windowOp);
    }
    return subOpt;
}
/**
 * Translates one analyzed query block ({@link SelectRelation}) into a tree of logical
 * operators, wiring the clauses in SQL evaluation order:
 * FROM -> WHERE -> aggregate/GROUP BY -> HAVING -> window -> DISTINCT -> ORDER BY -> LIMIT.
 */
class QueryTransformer {
    // Allocates every ColumnRefOperator produced while planning this block.
    private final ColumnRefFactory columnRefFactory;
    private final ConnectContext session;
    // Correlated column refs discovered while translating predicates (populated by filter()).
    private final List<ColumnRefOperator> correlation = new ArrayList<>();
    private final CTETransformerContext cteContext;
    private final boolean inlineView;
    // Mapping from produced operators back to their originating AST nodes.
    private final Map<Operator, ParseNode> optToAstMap;
    // Names of the synthetic columns generated for grouping sets / grouping() calls.
    public static final String GROUPING_ID = "GROUPING_ID";
    public static final String GROUPING = "GROUPING";

    public QueryTransformer(ColumnRefFactory columnRefFactory, ConnectContext session,
                            CTETransformerContext cteContext, boolean inlineView,
                            Map<Operator, ParseNode> optToAstMap) {
        this.columnRefFactory = columnRefFactory;
        this.session = session;
        this.cteContext = cteContext;
        this.inlineView = inlineView;
        this.optToAstMap = optToAstMap;
    }

    /**
     * Plans the whole query block. {@code outer} is the expression mapping of the
     * enclosing query, used to resolve correlated references.
     *
     * @return the logical plan plus its output columns and any correlated columns found
     */
    public LogicalPlan plan(SelectRelation queryBlock, ExpressionMapping outer) {
        OptExprBuilder builder = planFrom(queryBlock.getRelation(), cteContext);
        builder.setExpressionMapping(
                new ExpressionMapping(builder.getScope(), builder.getFieldMappings(), outer));

        // Register generated-column expressions so later clauses can resolve them by column.
        Map<Expr, SlotRef> generatedExprToColumnRef = queryBlock.getGeneratedExprToColumnRef();
        ExpressionMapping expressionMapping = builder.getExpressionMapping();
        for (Map.Entry<Expr, SlotRef> m : generatedExprToColumnRef.entrySet()) {
            ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(m.getValue(),
                    builder.getExpressionMapping(), columnRefFactory);
            expressionMapping.put(m.getKey(), (ColumnRefOperator) scalarOperator);
        }

        builder = filter(builder, queryBlock.getPredicate());
        builder = aggregate(builder, queryBlock.getGroupBy(), queryBlock.getAggregate(),
                queryBlock.getGroupingSetsList(), queryBlock.getGroupingFunctionCallExprs());
        builder = filter(builder, queryBlock.getHaving());

        List<AnalyticExpr> analyticExprList = new ArrayList<>(queryBlock.getOutputAnalytic());
        analyticExprList.addAll(queryBlock.getOrderByAnalytic());
        builder = window(builder, analyticExprList);

        if (queryBlock.hasOrderByClause()) {
            if (!queryBlock.getGroupBy().isEmpty() || !queryBlock.getAggregate().isEmpty()) {
                // With aggregation, order-by may reference expressions that are not output
                // columns; expose them alongside the outputs so the sort can see them.
                List<String> outputNames = new ArrayList<>(queryBlock.getColumnOutputNames());
                for (int i = 0; i < queryBlock.getOrderSourceExpressions().size(); ++i) {
                    outputNames.add(queryBlock.getOrderSourceExpressions().get(i).toString());
                }
                builder = projectForOrder(builder,
                        Iterables.concat(queryBlock.getOutputExpression(),
                                queryBlock.getOrderSourceExpressions(),
                                queryBlock.getOrderByAnalytic()),
                        queryBlock.getOutputExprInOrderByScope(),
                        outputNames,
                        builder.getFieldMappings(),
                        queryBlock.getOrderScope(),
                        true);
            } else {
                builder = projectForOrder(builder,
                        Iterables.concat(queryBlock.getOutputExpression(),
                                queryBlock.getOrderByAnalytic()),
                        queryBlock.getOutputExprInOrderByScope(),
                        queryBlock.getColumnOutputNames(),
                        builder.getFieldMappings(),
                        queryBlock.getOrderScope(),
                        queryBlock.isDistinct());
            }
        }

        builder = distinct(builder, queryBlock.isDistinct(), queryBlock.getOutputExpression());
        builder = project(builder, Iterables.concat(queryBlock.getOrderByExpressions(),
                queryBlock.getOutputExpression()));
        List<ColumnRefOperator> orderByColumns = Lists.newArrayList();
        builder = sort(builder, queryBlock.getOrderBy(), orderByColumns);
        builder = limit(builder, queryBlock.getLimit());

        List<ColumnRefOperator> outputColumns =
                computeOutputs(builder, queryBlock.getOutputExpression(), columnRefFactory);

        // If the sort needed columns that are not part of the final output, add a pruning
        // project on top so only the declared outputs escape the block.
        if (!orderByColumns.isEmpty() && !outputColumns.containsAll(orderByColumns)) {
            long limit = queryBlock.hasLimit() ? queryBlock.getLimit().getLimit() : -1;
            builder = project(builder, queryBlock.getOutputExpression(), limit);
        }
        return new LogicalPlan(builder, outputColumns, correlation);
    }

    // Resolves the block's output expressions to the column refs produced by the plan.
    private static List<ColumnRefOperator> computeOutputs(OptExprBuilder builder,
                                                          List<Expr> outputExpressions,
                                                          ColumnRefFactory columnRefFactory) {
        List<ColumnRefOperator> outputs = new ArrayList<>();
        for (Expr expression : outputExpressions) {
            outputs.add((ColumnRefOperator) SqlToScalarOperatorTranslator
                    .translate(expression, builder.getExpressionMapping(), columnRefFactory));
        }
        return outputs;
    }

    // Plans the FROM clause by delegating to RelationTransformer with a fresh anonymous scope.
    private OptExprBuilder planFrom(Relation node, CTETransformerContext cteContext) {
        TransformerContext transformerContext = new TransformerContext(
                columnRefFactory, session,
                new ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields())),
                cteContext, inlineView, optToAstMap);
        return new RelationTransformer(transformerContext).visit(node).getRootBuilder();
    }

    /**
     * Builds the project that feeds the sort when an ORDER BY is present, exposing output
     * expressions under the order-by scope. Output expressions whose index appears in
     * {@code outputExprInOrderByScope} are additionally registered under their output
     * alias so ORDER BY can refer to them by name.
     *
     * NOTE(review): translate() here may rewrite subqueries; each call can replace
     * {@code subOpt}, so the loop must keep re-reading subOpt's mapping.
     */
    private OptExprBuilder projectForOrder(OptExprBuilder subOpt, Iterable<Expr> outputExpression,
                                           List<Integer> outputExprInOrderByScope,
                                           List<String> outputNames,
                                           List<ColumnRefOperator> sourceExpression,
                                           Scope scope, boolean withAggregation) {
        ExpressionMapping outputTranslations = new ExpressionMapping(scope);
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        int outputExprIdx = 0;
        for (Expr expression : outputExpression) {
            Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap();
            ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression,
                    subOpt.getExpressionMapping(), columnRefFactory,
                    session, cteContext, subOpt, subqueryPlaceholders, false);
            Pair<ScalarOperator, OptExprBuilder> pair =
                    SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders);
            scalarOperator = pair.first;
            subOpt = pair.second;
            ColumnRefOperator columnRefOperator =
                    getOrCreateColumnRefOperator(expression, scalarOperator, projections);
            projections.put(columnRefOperator, scalarOperator);
            if (outputExprInOrderByScope.contains(outputExprIdx)) {
                outputTranslations.putWithSymbol(expression,
                        new SlotRef(null, outputNames.get(outputExprIdx)), columnRefOperator);
            } else {
                outputTranslations.putWithSymbol(expression, expression, columnRefOperator);
            }
            outputExprIdx++;
        }
        if (!withAggregation) {
            // Without aggregation, also forward the child's own columns so ORDER BY can
            // reference source columns that are not in the select list.
            List<ColumnRefOperator> fieldMappings = new ArrayList<>(outputTranslations.getFieldMappings());
            for (int i = 0; i < sourceExpression.size(); ++i) {
                ColumnRefOperator columnRefOperator = sourceExpression.get(i);
                projections.put(columnRefOperator, columnRefOperator);
                fieldMappings.set(scope.getRelationFields().size() + i, columnRefOperator);
            }
            outputTranslations.setFieldMappings(fieldMappings);
        }
        outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns());
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections);
        return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations);
    }

    // Convenience overload: project without a limit pushed into the operator.
    private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions) {
        return project(subOpt, expressions, -1);
    }

    /**
     * Adds a LogicalProjectOperator evaluating {@code expressions}; subqueries inside the
     * expressions are rewritten and may replace {@code subOpt} mid-loop.
     * {@code limit} of -1 means no limit.
     */
    private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions, long limit) {
        ExpressionMapping outputTranslations =
                new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings());
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        for (Expr expression : expressions) {
            Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap();
            ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression,
                    subOpt.getExpressionMapping(), columnRefFactory,
                    session, cteContext, subOpt, subqueryPlaceholders, false);
            Pair<ScalarOperator, OptExprBuilder> pair =
                    SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders);
            scalarOperator = pair.first;
            subOpt = pair.second;
            ColumnRefOperator columnRefOperator =
                    getOrCreateColumnRefOperator(expression, scalarOperator, projections);
            projections.put(columnRefOperator, scalarOperator);
            outputTranslations.put(expression, columnRefOperator);
        }
        outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns());
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections, limit);
        return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations);
    }

    /**
     * Adds a filter for WHERE/HAVING. Correlated refs are collected into {@code correlation};
     * a null translated predicate (e.g. fully rewritten away) adds no filter node.
     */
    private OptExprBuilder filter(OptExprBuilder subOpt, Expr predicate) {
        if (predicate == null) {
            return subOpt;
        }
        Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap();
        ScalarOperator scalarPredicate = SqlToScalarOperatorTranslator.translate(predicate,
                subOpt.getExpressionMapping(), correlation, columnRefFactory,
                session, cteContext, subOpt, subqueryPlaceholders, true);
        Pair<ScalarOperator, OptExprBuilder> pair =
                SubqueryUtils.rewriteScalarOperator(scalarPredicate, subOpt, subqueryPlaceholders);
        scalarPredicate = pair.first;
        subOpt = pair.second;
        if (scalarPredicate == null) {
            return subOpt;
        }
        LogicalFilterOperator filterOperator = new LogicalFilterOperator(scalarPredicate);
        return subOpt.withNewRoot(filterOperator);
    }

    // Adds a LIMIT/OFFSET node when the clause is present.
    private OptExprBuilder limit(OptExprBuilder subOpt, LimitElement limit) {
        if (limit == null) {
            return subOpt;
        }
        LogicalLimitOperator limitOperator = LogicalLimitOperator.init(limit.getLimit(), limit.getOffset());
        return subOpt.withNewRoot(limitOperator);
    }

    /**
     * Plans GROUP BY / aggregate functions, including GROUPING SETS / ROLLUP / CUBE
     * (via {@code groupingSetsList}) and grouping() calls. When grouping sets are present
     * a LogicalRepeatOperator is inserted below the aggregation and synthetic
     * GROUPING_ID / GROUPING columns are generated.
     */
    public OptExprBuilder aggregate(OptExprBuilder subOpt,
                                    List<Expr> groupByExpressions, List<FunctionCallExpr> aggregates,
                                    List<List<Expr>> groupingSetsList, List<Expr> groupingFunctionCallExprs) {
        if (aggregates.size() == 0 && groupByExpressions.size() == 0) {
            return subOpt;
        }

        // With grouping sets, aggregate arguments that equal a group-by expression must be
        // cloned so the repeat operator's NULL-filling does not corrupt the aggregate input.
        List<FunctionCallExpr> copyAggregates;
        if (groupingSetsList != null) {
            copyAggregates = aggregates.stream().map(e -> (FunctionCallExpr) e.clone())
                    .collect(Collectors.toList());
            for (Expr groupBy : groupByExpressions) {
                copyAggregates.replaceAll(
                        root -> (FunctionCallExpr) replaceExprBottomUp(root, groupBy, new CloneExpr(groupBy)));
            }
        } else {
            copyAggregates = aggregates;
        }

        // Project every group-by expression and every non-constant aggregate argument so
        // the aggregation node only sees column refs.
        ImmutableList.Builder<Expr> arguments = ImmutableList.builder();
        copyAggregates.stream().filter(f -> !f.getParams().isStar())
                .map(TreeNode::getChildren).flatMap(List::stream)
                .filter(e -> !(e.isConstant())).forEach(arguments::add);
        Iterable<Expr> inputs = Iterables.concat(groupByExpressions, arguments.build());
        if (!Iterables.isEmpty(inputs)) {
            subOpt = project(subOpt, inputs);
        }

        ExpressionMapping groupingTranslations =
                new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings());
        List<ColumnRefOperator> groupByColumnRefs = new ArrayList<>();
        boolean groupAllConst = groupByExpressions.stream().allMatch(Expr::isConstant);
        for (Expr groupingItem : groupByExpressions) {
            // Constant group-by keys are dropped, except: keep one when ALL keys are
            // constant (so the aggregation still groups), and keep all under grouping sets.
            if (groupingItem.isConstant() && !(groupAllConst && groupByColumnRefs.isEmpty()) &&
                    groupingSetsList == null) {
                continue;
            }
            ScalarOperator groupingKey = SqlToScalarOperatorTranslator.translate(groupingItem,
                    subOpt.getExpressionMapping(), columnRefFactory);
            ColumnRefOperator colRef = (ColumnRefOperator) groupingKey;
            if (!groupByColumnRefs.contains(colRef)) {
                groupByColumnRefs.add(colRef);
            }
            groupingTranslations.put(groupingItem, colRef);
        }

        Map<ColumnRefOperator, CallOperator> aggregationsMap = Maps.newHashMap();
        for (int i = 0; i < aggregates.size(); i++) {
            FunctionCallExpr copyAggregate = copyAggregates.get(i);
            ScalarOperator aggCallOperator = SqlToScalarOperatorTranslator.translate(copyAggregate,
                    subOpt.getExpressionMapping(), columnRefFactory);
            CallOperator aggOperator = (CallOperator) aggCallOperator;
            ColumnRefOperator colRef = columnRefFactory.create(aggOperator.getFnName(),
                    copyAggregate.getType(), copyAggregate.isNullable());
            aggregationsMap.put(colRef, aggOperator);
            // Map the ORIGINAL aggregate expr (not the clone) so later clauses resolve it.
            groupingTranslations.put(aggregates.get(i), colRef);
        }

        if (groupingSetsList != null) {
            /*
             * repeatOutput is used to record the output column of repeatOperator,
             * this output column only represents the generated grouping_id column
             */
            List<ColumnRefOperator> repeatOutput = new ArrayList<>();

            /*
             * groupingIdsBitSets is used to record the complete grouping_id,
             * which contains all the group by columns.
             * groupingIds is converted by groupingIdsBitSets
             */
            ArrayList<BitSet> groupingIdsBitSets = new ArrayList<>();
            List<List<Long>> groupingIds = new ArrayList<>();

            /*
             * repeatColumnRefList is used to record the column reference
             * that needs to be repeatedly calculated.
             * This column reference is come from the child of repeat operator
             */
            List<List<ColumnRefOperator>> repeatColumnRefList = new ArrayList<>();

            for (List<Expr> grouping : groupingSetsList) {
                List<ColumnRefOperator> repeatColumnRef = new ArrayList<>();
                // Bits start all-set; a bit is cleared for each column present in this set.
                BitSet groupingIdBitSet = new BitSet(groupByColumnRefs.size());
                groupingIdBitSet.set(0, groupByExpressions.size(), true);
                for (Expr groupingField : grouping) {
                    ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator.translate(
                            groupingField, subOpt.getExpressionMapping(), columnRefFactory);
                    repeatColumnRef.add(groupingKey);
                    if (groupByColumnRefs.contains(groupingKey)) {
                        groupingIdBitSet.set(groupByColumnRefs.indexOf(groupingKey), false);
                    }
                }
                groupingIdsBitSets.add(groupingIdBitSet);
                repeatColumnRefList.add(repeatColumnRef);
            }

            // Synthesize the GROUPING_ID column; duplicate gids (identical grouping sets)
            // are disambiguated by bumping past the bit-width of the id.
            ColumnRefOperator grouping = columnRefFactory.create(GROUPING_ID, Type.BIGINT, false);
            List<Long> groupingID = new ArrayList<>();
            for (BitSet bitSet : groupingIdsBitSets) {
                long gid = Utils.convertBitSetToLong(bitSet, groupByColumnRefs.size());
                while (groupingID.contains(gid)) {
                    gid += Math.pow(2, groupByColumnRefs.size());
                }
                groupingID.add(gid);
            }
            groupingIds.add(groupingID);
            groupByColumnRefs.add(grouping);
            repeatOutput.add(grouping);

            // One synthetic GROUPING column per grouping() call in the query.
            for (Expr groupingFunction : groupingFunctionCallExprs) {
                grouping = columnRefFactory.create(GROUPING, Type.BIGINT, false);
                ArrayList<BitSet> tempGroupingIdsBitSets = new ArrayList<>();
                for (int i = 0; i < repeatColumnRefList.size(); ++i) {
                    tempGroupingIdsBitSets.add(new BitSet(groupingFunction.getChildren().size()));
                }
                for (int childIdx = 0; childIdx < groupingFunction.getChildren().size(); ++childIdx) {
                    SlotRef slotRef = (SlotRef) groupingFunction.getChild(childIdx);
                    ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator
                            .translate(slotRef, subOpt.getExpressionMapping(), columnRefFactory);
                    for (List<ColumnRefOperator> repeatColumns : repeatColumnRefList) {
                        if (repeatColumns.contains(groupingKey)) {
                            for (int repeatColIdx = 0; repeatColIdx < repeatColumnRefList.size(); ++repeatColIdx) {
                                tempGroupingIdsBitSets.get(repeatColIdx).set(childIdx,
                                        groupingIdsBitSets.get(repeatColIdx)
                                                .get(groupByColumnRefs.indexOf(groupingKey)));
                            }
                        }
                    }
                }
                groupingTranslations.put(groupingFunction, grouping);
                groupingIds.add(tempGroupingIdsBitSets.stream().map(bitset ->
                        Utils.convertBitSetToLong(bitset, groupingFunction.getChildren().size()))
                        .collect(Collectors.toList()));
                groupByColumnRefs.add(grouping);
                repeatOutput.add(grouping);
            }

            LogicalRepeatOperator repeatOperator =
                    new LogicalRepeatOperator(repeatOutput, repeatColumnRefList, groupingIds);
            subOpt = new OptExprBuilder(repeatOperator, Lists.newArrayList(subOpt), groupingTranslations);
        }

        return new OptExprBuilder(
                new LogicalAggregationOperator(AggType.GLOBAL, groupByColumnRefs, aggregationsMap),
                Lists.newArrayList(subOpt), groupingTranslations);
    }

    // Recursively replaces every subtree equal to `pattern` with `replace`, bottom-up,
    // mutating `root`'s children in place.
    private Expr replaceExprBottomUp(Expr root, Expr pattern, Expr replace) {
        if (root.getChildren().size() > 0) {
            for (int i = 0; i < root.getChildren().size(); i++) {
                Expr result = replaceExprBottomUp(root.getChild(i), pattern, replace);
                root.setChild(i, result);
            }
        }
        if (root.equals(pattern)) {
            return replace;
        }
        return root;
    }

    /**
     * Adds a sort (LogicalTopNOperator). Literal order-by items are skipped; duplicate
     * columns keep only their first ordering. The resolved columns are appended to
     * {@code orderByColumns} for the caller's output-pruning check.
     */
    private OptExprBuilder sort(OptExprBuilder subOpt, List<OrderByElement> orderByExpressions,
                                List<ColumnRefOperator> orderByColumns) {
        if (orderByExpressions.isEmpty()) {
            return subOpt;
        }
        List<Ordering> orderings = new ArrayList<>();
        for (OrderByElement item : orderByExpressions) {
            if (item.getExpr().isLiteral()) {
                continue;
            }
            ColumnRefOperator column = (ColumnRefOperator) SqlToScalarOperatorTranslator.translate(
                    item.getExpr(), subOpt.getExpressionMapping(), columnRefFactory);
            Ordering ordering = new Ordering(column, item.getIsAsc(),
                    OrderByElement.nullsFirst(item.getNullsFirstParam()));
            if (!orderByColumns.contains(column)) {
                orderings.add(ordering);
                orderByColumns.add(column);
            }
        }
        // All items were literals/duplicates: nothing to sort on.
        if (orderByColumns.isEmpty()) {
            return subOpt;
        }
        LogicalTopNOperator sortOperator = new LogicalTopNOperator(orderings);
        return subOpt.withNewRoot(sortOperator);
    }

    // Implements SELECT DISTINCT as a group-by over all output expressions with no aggregates.
    private OptExprBuilder distinct(OptExprBuilder subOpt, boolean isDistinct,
                                    List<Expr> outputExpressions) {
        if (isDistinct) {
            subOpt = project(subOpt, outputExpressions);
            List<ColumnRefOperator> groupByColumns = Lists.newArrayList();
            for (Expr expr : outputExpressions) {
                ColumnRefOperator column = (ColumnRefOperator) SqlToScalarOperatorTranslator
                        .translate(expr, subOpt.getExpressionMapping(), columnRefFactory);
                if (!groupByColumns.contains(column)) {
                    groupByColumns.add(column);
                }
            }
            return subOpt.withNewRoot(
                    new LogicalAggregationOperator(AggType.GLOBAL, groupByColumns, new HashMap<>()));
        } else {
            return subOpt;
        }
    }

    /**
     * Resolves the column ref to use for an expression in a project:
     * reuse the operator itself if it is already a column ref, reuse an existing
     * projection key when the same variable value is already projected, otherwise
     * create a fresh column ref for the expression.
     */
    private ColumnRefOperator getOrCreateColumnRefOperator(Expr expression, ScalarOperator scalarOperator,
                                                           Map<ColumnRefOperator, ScalarOperator> projections) {
        ColumnRefOperator columnRefOperator;
        if (scalarOperator.isColumnRef()) {
            columnRefOperator = (ColumnRefOperator) scalarOperator;
        } else if (scalarOperator.isVariable() && projections.containsValue(scalarOperator)) {
            columnRefOperator = projections.entrySet().stream()
                    .filter(e -> scalarOperator.equals(e.getValue()))
                    .findAny()
                    .map(Map.Entry::getKey)
                    .orElse(null);
            Preconditions.checkNotNull(columnRefOperator);
        } else {
            columnRefOperator = columnRefFactory.create(expression, expression.getType(),
                    scalarOperator.isNullable());
        }
        return columnRefOperator;
    }
}
class QueryTransformer { private final ColumnRefFactory columnRefFactory; private final ConnectContext session; private final List<ColumnRefOperator> correlation = new ArrayList<>(); private final CTETransformerContext cteContext; private final boolean inlineView; private final Map<Operator, ParseNode> optToAstMap; public static final String GROUPING_ID = "GROUPING_ID"; public static final String GROUPING = "GROUPING"; public QueryTransformer(ColumnRefFactory columnRefFactory, ConnectContext session, CTETransformerContext cteContext, boolean inlineView, Map<Operator, ParseNode> optToAstMap) { this.columnRefFactory = columnRefFactory; this.session = session; this.cteContext = cteContext; this.inlineView = inlineView; this.optToAstMap = optToAstMap; } public LogicalPlan plan(SelectRelation queryBlock, ExpressionMapping outer) { OptExprBuilder builder = planFrom(queryBlock.getRelation(), cteContext); builder.setExpressionMapping(new ExpressionMapping(builder.getScope(), builder.getFieldMappings(), outer)); Map<Expr, SlotRef> generatedExprToColumnRef = queryBlock.getGeneratedExprToColumnRef(); ExpressionMapping expressionMapping = builder.getExpressionMapping(); for (Map.Entry<Expr, SlotRef> m : generatedExprToColumnRef.entrySet()) { ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(m.getValue(), builder.getExpressionMapping(), columnRefFactory); expressionMapping.put(m.getKey(), (ColumnRefOperator) scalarOperator); } builder = filter(builder, queryBlock.getPredicate()); builder = aggregate(builder, queryBlock.getGroupBy(), queryBlock.getAggregate(), queryBlock.getGroupingSetsList(), queryBlock.getGroupingFunctionCallExprs()); builder = filter(builder, queryBlock.getHaving()); List<AnalyticExpr> analyticExprList = new ArrayList<>(queryBlock.getOutputAnalytic()); analyticExprList.addAll(queryBlock.getOrderByAnalytic()); builder = window(builder, analyticExprList); if (queryBlock.hasOrderByClause()) { if (!queryBlock.getGroupBy().isEmpty() || 
!queryBlock.getAggregate().isEmpty()) { List<String> outputNames = new ArrayList<>(queryBlock.getColumnOutputNames()); for (int i = 0; i < queryBlock.getOrderSourceExpressions().size(); ++i) { outputNames.add(queryBlock.getOrderSourceExpressions().get(i).toString()); } builder = projectForOrder(builder, Iterables.concat(queryBlock.getOutputExpression(), queryBlock.getOrderSourceExpressions(), queryBlock.getOrderByAnalytic()), queryBlock.getOutputExprInOrderByScope(), outputNames, builder.getFieldMappings(), queryBlock.getOrderScope(), true); } else { builder = projectForOrder(builder, Iterables.concat(queryBlock.getOutputExpression(), queryBlock.getOrderByAnalytic()), queryBlock.getOutputExprInOrderByScope(), queryBlock.getColumnOutputNames(), builder.getFieldMappings(), queryBlock.getOrderScope(), queryBlock.isDistinct()); } } builder = distinct(builder, queryBlock.isDistinct(), queryBlock.getOutputExpression()); builder = project(builder, Iterables.concat(queryBlock.getOrderByExpressions(), queryBlock.getOutputExpression())); List<ColumnRefOperator> orderByColumns = Lists.newArrayList(); builder = sort(builder, queryBlock.getOrderBy(), orderByColumns); builder = limit(builder, queryBlock.getLimit()); List<ColumnRefOperator> outputColumns = computeOutputs(builder, queryBlock.getOutputExpression(), columnRefFactory); if (!orderByColumns.isEmpty() && !outputColumns.containsAll(orderByColumns)) { long limit = queryBlock.hasLimit() ? 
queryBlock.getLimit().getLimit() : -1; builder = project(builder, queryBlock.getOutputExpression(), limit); } return new LogicalPlan(builder, outputColumns, correlation); } private static List<ColumnRefOperator> computeOutputs(OptExprBuilder builder, List<Expr> outputExpressions, ColumnRefFactory columnRefFactory) { List<ColumnRefOperator> outputs = new ArrayList<>(); for (Expr expression : outputExpressions) { outputs.add((ColumnRefOperator) SqlToScalarOperatorTranslator .translate(expression, builder.getExpressionMapping(), columnRefFactory)); } return outputs; } private OptExprBuilder planFrom(Relation node, CTETransformerContext cteContext) { TransformerContext transformerContext = new TransformerContext( columnRefFactory, session, new ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields())), cteContext, inlineView, optToAstMap); return new RelationTransformer(transformerContext).visit(node).getRootBuilder(); } private OptExprBuilder projectForOrder(OptExprBuilder subOpt, Iterable<Expr> outputExpression, List<Integer> outputExprInOrderByScope, List<String> outputNames, List<ColumnRefOperator> sourceExpression, Scope scope, boolean withAggregation) { ExpressionMapping outputTranslations = new ExpressionMapping(scope); Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap(); int outputExprIdx = 0; for (Expr expression : outputExpression) { Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap(); ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression, subOpt.getExpressionMapping(), columnRefFactory, session, cteContext, subOpt, subqueryPlaceholders, false); Pair<ScalarOperator, OptExprBuilder> pair = SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders); scalarOperator = pair.first; subOpt = pair.second; ColumnRefOperator columnRefOperator = getOrCreateColumnRefOperator(expression, scalarOperator, projections); projections.put(columnRefOperator, 
scalarOperator); if (outputExprInOrderByScope.contains(outputExprIdx)) { outputTranslations.putWithSymbol(expression, new SlotRef(null, outputNames.get(outputExprIdx)), columnRefOperator); } else { outputTranslations.putWithSymbol(expression, expression, columnRefOperator); } outputExprIdx++; } if (!withAggregation) { List<ColumnRefOperator> fieldMappings = new ArrayList<>(outputTranslations.getFieldMappings()); for (int i = 0; i < sourceExpression.size(); ++i) { ColumnRefOperator columnRefOperator = sourceExpression.get(i); projections.put(columnRefOperator, columnRefOperator); fieldMappings.set(scope.getRelationFields().size() + i, columnRefOperator); } outputTranslations.setFieldMappings(fieldMappings); } outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns()); LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections); return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations); } private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions) { return project(subOpt, expressions, -1); } private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions, long limit) { ExpressionMapping outputTranslations = new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings()); Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap(); for (Expr expression : expressions) { Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap(); ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression, subOpt.getExpressionMapping(), columnRefFactory, session, cteContext, subOpt, subqueryPlaceholders, false); Pair<ScalarOperator, OptExprBuilder> pair = SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders); scalarOperator = pair.first; subOpt = pair.second; ColumnRefOperator columnRefOperator = getOrCreateColumnRefOperator(expression, scalarOperator, projections); 
projections.put(columnRefOperator, scalarOperator); outputTranslations.put(expression, columnRefOperator); } outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns()); LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections, limit); return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations); } private OptExprBuilder filter(OptExprBuilder subOpt, Expr predicate) { if (predicate == null) { return subOpt; } Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap(); ScalarOperator scalarPredicate = SqlToScalarOperatorTranslator.translate(predicate, subOpt.getExpressionMapping(), correlation, columnRefFactory, session, cteContext, subOpt, subqueryPlaceholders, true); Pair<ScalarOperator, OptExprBuilder> pair = SubqueryUtils.rewriteScalarOperator(scalarPredicate, subOpt, subqueryPlaceholders); scalarPredicate = pair.first; subOpt = pair.second; if (scalarPredicate == null) { return subOpt; } LogicalFilterOperator filterOperator = new LogicalFilterOperator(scalarPredicate); return subOpt.withNewRoot(filterOperator); } private OptExprBuilder limit(OptExprBuilder subOpt, LimitElement limit) { if (limit == null) { return subOpt; } LogicalLimitOperator limitOperator = LogicalLimitOperator.init(limit.getLimit(), limit.getOffset()); return subOpt.withNewRoot(limitOperator); } public OptExprBuilder aggregate(OptExprBuilder subOpt, List<Expr> groupByExpressions, List<FunctionCallExpr> aggregates, List<List<Expr>> groupingSetsList, List<Expr> groupingFunctionCallExprs) { if (aggregates.size() == 0 && groupByExpressions.size() == 0) { return subOpt; } List<FunctionCallExpr> copyAggregates; if (groupingSetsList != null) { copyAggregates = aggregates.stream().map(e -> (FunctionCallExpr) e.clone()) .collect(Collectors.toList()); for (Expr groupBy : groupByExpressions) { copyAggregates.replaceAll( root -> (FunctionCallExpr) replaceExprBottomUp(root, groupBy, new 
CloneExpr(groupBy))); } } else { copyAggregates = aggregates; } ImmutableList.Builder<Expr> arguments = ImmutableList.builder(); copyAggregates.stream().filter(f -> !f.getParams().isStar()) .map(TreeNode::getChildren).flatMap(List::stream) .filter(e -> !(e.isConstant())).forEach(arguments::add); Iterable<Expr> inputs = Iterables.concat(groupByExpressions, arguments.build()); if (!Iterables.isEmpty(inputs)) { subOpt = project(subOpt, inputs); } ExpressionMapping groupingTranslations = new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings()); List<ColumnRefOperator> groupByColumnRefs = new ArrayList<>(); boolean groupAllConst = groupByExpressions.stream().allMatch(Expr::isConstant); for (Expr groupingItem : groupByExpressions) { if (groupingItem.isConstant() && !(groupAllConst && groupByColumnRefs.isEmpty()) && groupingSetsList == null) { continue; } ScalarOperator groupingKey = SqlToScalarOperatorTranslator.translate(groupingItem, subOpt.getExpressionMapping(), columnRefFactory); ColumnRefOperator colRef = (ColumnRefOperator) groupingKey; if (!groupByColumnRefs.contains(colRef)) { groupByColumnRefs.add(colRef); } groupingTranslations.put(groupingItem, colRef); } Map<ColumnRefOperator, CallOperator> aggregationsMap = Maps.newHashMap(); for (int i = 0; i < aggregates.size(); i++) { FunctionCallExpr copyAggregate = copyAggregates.get(i); ScalarOperator aggCallOperator = SqlToScalarOperatorTranslator.translate(copyAggregate, subOpt.getExpressionMapping(), columnRefFactory); CallOperator aggOperator = (CallOperator) aggCallOperator; ColumnRefOperator colRef = columnRefFactory.create(aggOperator.getFnName(), copyAggregate.getType(), copyAggregate.isNullable()); aggregationsMap.put(colRef, aggOperator); groupingTranslations.put(aggregates.get(i), colRef); } if (groupingSetsList != null) { /* * repeatOutput is used to record the output column of repeatOperator, * this output column only represents the generated grouping_id column */ List<ColumnRefOperator> 
repeatOutput = new ArrayList<>(); /* * groupingIdsBitSets is used to record the complete grouping_id, * which contains all the group by columns. * groupingIds is converted by groupingIdsBitSets */ ArrayList<BitSet> groupingIdsBitSets = new ArrayList<>(); List<List<Long>> groupingIds = new ArrayList<>(); /* * repeatColumnRefList is used to record the column reference * that needs to be repeatedly calculated. * This column reference is come from the child of repeat operator */ List<List<ColumnRefOperator>> repeatColumnRefList = new ArrayList<>(); for (List<Expr> grouping : groupingSetsList) { List<ColumnRefOperator> repeatColumnRef = new ArrayList<>(); BitSet groupingIdBitSet = new BitSet(groupByColumnRefs.size()); groupingIdBitSet.set(0, groupByExpressions.size(), true); for (Expr groupingField : grouping) { ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator.translate( groupingField, subOpt.getExpressionMapping(), columnRefFactory); repeatColumnRef.add(groupingKey); if (groupByColumnRefs.contains(groupingKey)) { groupingIdBitSet.set(groupByColumnRefs.indexOf(groupingKey), false); } } groupingIdsBitSets.add(groupingIdBitSet); repeatColumnRefList.add(repeatColumnRef); } ColumnRefOperator grouping = columnRefFactory.create(GROUPING_ID, Type.BIGINT, false); List<Long> groupingID = new ArrayList<>(); for (BitSet bitSet : groupingIdsBitSets) { long gid = Utils.convertBitSetToLong(bitSet, groupByColumnRefs.size()); while (groupingID.contains(gid)) { gid += Math.pow(2, groupByColumnRefs.size()); } groupingID.add(gid); } groupingIds.add(groupingID); groupByColumnRefs.add(grouping); repeatOutput.add(grouping); for (Expr groupingFunction : groupingFunctionCallExprs) { grouping = columnRefFactory.create(GROUPING, Type.BIGINT, false); ArrayList<BitSet> tempGroupingIdsBitSets = new ArrayList<>(); for (int i = 0; i < repeatColumnRefList.size(); ++i) { tempGroupingIdsBitSets.add(new BitSet(groupingFunction.getChildren().size())); } for (int childIdx = 
0; childIdx < groupingFunction.getChildren().size(); ++childIdx) { SlotRef slotRef = (SlotRef) groupingFunction.getChild(childIdx); ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator .translate(slotRef, subOpt.getExpressionMapping(), columnRefFactory); for (List<ColumnRefOperator> repeatColumns : repeatColumnRefList) { if (repeatColumns.contains(groupingKey)) { for (int repeatColIdx = 0; repeatColIdx < repeatColumnRefList.size(); ++repeatColIdx) { tempGroupingIdsBitSets.get(repeatColIdx).set(childIdx, groupingIdsBitSets.get(repeatColIdx) .get(groupByColumnRefs.indexOf(groupingKey))); } } } } groupingTranslations.put(groupingFunction, grouping); groupingIds.add(tempGroupingIdsBitSets.stream().map(bitset -> Utils.convertBitSetToLong(bitset, groupingFunction.getChildren().size())) .collect(Collectors.toList())); groupByColumnRefs.add(grouping); repeatOutput.add(grouping); } LogicalRepeatOperator repeatOperator = new LogicalRepeatOperator(repeatOutput, repeatColumnRefList, groupingIds); subOpt = new OptExprBuilder(repeatOperator, Lists.newArrayList(subOpt), groupingTranslations); } return new OptExprBuilder( new LogicalAggregationOperator(AggType.GLOBAL, groupByColumnRefs, aggregationsMap), Lists.newArrayList(subOpt), groupingTranslations); } private Expr replaceExprBottomUp(Expr root, Expr pattern, Expr replace) { if (root.getChildren().size() > 0) { for (int i = 0; i < root.getChildren().size(); i++) { Expr result = replaceExprBottomUp(root.getChild(i), pattern, replace); root.setChild(i, result); } } if (root.equals(pattern)) { return replace; } return root; } private OptExprBuilder sort(OptExprBuilder subOpt, List<OrderByElement> orderByExpressions, List<ColumnRefOperator> orderByColumns) { if (orderByExpressions.isEmpty()) { return subOpt; } List<Ordering> orderings = new ArrayList<>(); for (OrderByElement item : orderByExpressions) { if (item.getExpr().isLiteral()) { continue; } ColumnRefOperator column = (ColumnRefOperator) 
SqlToScalarOperatorTranslator.translate(item.getExpr(), subOpt.getExpressionMapping(), columnRefFactory); Ordering ordering = new Ordering(column, item.getIsAsc(), OrderByElement.nullsFirst(item.getNullsFirstParam())); if (!orderByColumns.contains(column)) { orderings.add(ordering); orderByColumns.add(column); } } if (orderByColumns.isEmpty()) { return subOpt; } LogicalTopNOperator sortOperator = new LogicalTopNOperator(orderings); return subOpt.withNewRoot(sortOperator); } private OptExprBuilder distinct(OptExprBuilder subOpt, boolean isDistinct, List<Expr> outputExpressions) { if (isDistinct) { subOpt = project(subOpt, outputExpressions); List<ColumnRefOperator> groupByColumns = Lists.newArrayList(); for (Expr expr : outputExpressions) { ColumnRefOperator column = (ColumnRefOperator) SqlToScalarOperatorTranslator .translate(expr, subOpt.getExpressionMapping(), columnRefFactory); if (!groupByColumns.contains(column)) { groupByColumns.add(column); } } return subOpt.withNewRoot( new LogicalAggregationOperator(AggType.GLOBAL, groupByColumns, new HashMap<>())); } else { return subOpt; } } private ColumnRefOperator getOrCreateColumnRefOperator(Expr expression, ScalarOperator scalarOperator, Map<ColumnRefOperator, ScalarOperator> projections) { ColumnRefOperator columnRefOperator; if (scalarOperator.isColumnRef()) { columnRefOperator = (ColumnRefOperator) scalarOperator; } else if (scalarOperator.isVariable() && projections.containsValue(scalarOperator)) { columnRefOperator = projections.entrySet().stream() .filter(e -> scalarOperator.equals(e.getValue())) .findAny() .map(Map.Entry::getKey) .orElse(null); Preconditions.checkNotNull(columnRefOperator); } else { columnRefOperator = columnRefFactory.create(expression, expression.getType(), scalarOperator.isNullable()); } return columnRefOperator; } }
I specifically did not want to use it, in case there is an exception (or restart of host-admin) after createTempFile and before the move: It would leave stray files in the directory.
private void writeBytes(byte[] content, boolean atomic) { if (atomic) { String tmpPath = path.toPath().toString() + ".FileSyncTmp"; new UnixPath(path.toPath().getFileSystem().getPath(tmpPath)) .writeBytes(content) .atomicMove(path.toPath()); } else { path.writeBytes(content); } }
String tmpPath = path.toPath().toString() + ".FileSyncTmp";
private void writeBytes(byte[] content, boolean atomic) { if (atomic) { String tmpPath = path.toPath().toString() + ".FileSyncTmp"; new UnixPath(path.toPath().getFileSystem().getPath(tmpPath)) .writeBytes(content) .atomicMove(path.toPath()); } else { path.writeBytes(content); } }
class FileSync { private static final Logger logger = Logger.getLogger(FileSync.class.getName()); private final UnixPath path; private final FileContentCache contentCache; public FileSync(Path path) { this.path = new UnixPath(path); this.contentCache = new FileContentCache(this.path); } public boolean convergeTo(TaskContext taskContext, PartialFileData partialFileData) { return convergeTo(taskContext, partialFileData, false); } /** * CPU, I/O, and memory usage is optimized for repeated calls with the same arguments. * * @param atomicWrite Whether to write updates to a temporary file in the same directory, and atomically move it * to path. Ensures the file cannot be read while in the middle of writing it. * @return true if the system was modified: content was written, or owner was set, etc. * system is only modified if necessary (different). */ public boolean convergeTo(TaskContext taskContext, PartialFileData partialFileData, boolean atomicWrite) { FileAttributesCache currentAttributes = new FileAttributesCache(path); boolean modifiedSystem = maybeUpdateContent(taskContext, partialFileData.getContent(), currentAttributes, atomicWrite); AttributeSync attributeSync = new AttributeSync(path.toPath()).with(partialFileData); modifiedSystem |= attributeSync.converge(taskContext, currentAttributes); return modifiedSystem; } private boolean maybeUpdateContent(TaskContext taskContext, Optional<byte[]> content, FileAttributesCache currentAttributes, boolean atomicWrite) { if (!content.isPresent()) { return false; } if (!currentAttributes.exists()) { taskContext.recordSystemModification(logger, "Creating file " + path); path.createParents(); writeBytes(content.get(), atomicWrite); contentCache.updateWith(content.get(), currentAttributes.forceGet().lastModifiedTime()); return true; } if (Arrays.equals(content.get(), contentCache.get(currentAttributes.get().lastModifiedTime()))) { return false; } else { taskContext.recordSystemModification(logger, "Patching file " + path); 
writeBytes(content.get(), atomicWrite); contentCache.updateWith(content.get(), currentAttributes.forceGet().lastModifiedTime()); return true; } } }
class FileSync { private static final Logger logger = Logger.getLogger(FileSync.class.getName()); private final UnixPath path; private final FileContentCache contentCache; public FileSync(Path path) { this.path = new UnixPath(path); this.contentCache = new FileContentCache(this.path); } public boolean convergeTo(TaskContext taskContext, PartialFileData partialFileData) { return convergeTo(taskContext, partialFileData, false); } /** * CPU, I/O, and memory usage is optimized for repeated calls with the same arguments. * * @param atomicWrite Whether to write updates to a temporary file in the same directory, and atomically move it * to path. Ensures the file cannot be read while in the middle of writing it. * @return true if the system was modified: content was written, or owner was set, etc. * system is only modified if necessary (different). */ public boolean convergeTo(TaskContext taskContext, PartialFileData partialFileData, boolean atomicWrite) { FileAttributesCache currentAttributes = new FileAttributesCache(path); boolean modifiedSystem = maybeUpdateContent(taskContext, partialFileData.getContent(), currentAttributes, atomicWrite); AttributeSync attributeSync = new AttributeSync(path.toPath()).with(partialFileData); modifiedSystem |= attributeSync.converge(taskContext, currentAttributes); return modifiedSystem; } private boolean maybeUpdateContent(TaskContext taskContext, Optional<byte[]> content, FileAttributesCache currentAttributes, boolean atomicWrite) { if (!content.isPresent()) { return false; } if (!currentAttributes.exists()) { taskContext.recordSystemModification(logger, "Creating file " + path); path.createParents(); writeBytes(content.get(), atomicWrite); contentCache.updateWith(content.get(), currentAttributes.forceGet().lastModifiedTime()); return true; } if (Arrays.equals(content.get(), contentCache.get(currentAttributes.get().lastModifiedTime()))) { return false; } else { taskContext.recordSystemModification(logger, "Patching file " + path); 
writeBytes(content.get(), atomicWrite); contentCache.updateWith(content.get(), currentAttributes.forceGet().lastModifiedTime()); return true; } } }
I changed it to check the result of the operation into the assert
private void testCopy(S3Options options) throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId sourcePath = S3ResourceId.fromUri("s3: S3ResourceId destinationPath = S3ResourceId.fromUri("s3: HeadObjectResponse.Builder builder = HeadObjectResponse.builder().contentLength(0L); if (getSSECustomerKeyMd5(options) != null) { builder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } HeadObjectResponse headObjectResponse = builder.build(); assertGetObjectHead( s3FileSystem, createObjectHeadRequest(sourcePath, options), options, headObjectResponse); s3FileSystem.copy(sourcePath, destinationPath); verify(s3FileSystem.getS3Client(), times(1)).copyObject(any(CopyObjectRequest.class)); headObjectResponse.toBuilder().contentLength(5_368_709_120L).build(); assertGetObjectHead( s3FileSystem, createObjectHeadRequest(sourcePath, options), options, headObjectResponse); try { s3FileSystem.copy(sourcePath, destinationPath); } catch (NullPointerException e) { } verify(s3FileSystem.getS3Client(), never()).copyObject((CopyObjectRequest) null); }
headObjectResponse.toBuilder().contentLength(5_368_709_120L).build();
private void testCopy(S3Options options) throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId sourcePath = S3ResourceId.fromUri("s3: S3ResourceId destinationPath = S3ResourceId.fromUri("s3: HeadObjectResponse.Builder builder = HeadObjectResponse.builder().contentLength(0L); if (getSSECustomerKeyMd5(options) != null) { builder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } HeadObjectResponse headObjectResponse = builder.build(); assertGetObjectHead( s3FileSystem, createObjectHeadRequest(sourcePath, options), options, headObjectResponse); s3FileSystem.copy(sourcePath, destinationPath); verify(s3FileSystem.getS3Client(), times(1)).copyObject(any(CopyObjectRequest.class)); HeadObjectResponse bigHeadObjectResponse = headObjectResponse.toBuilder().contentLength(5_368_709_120L).build(); assertGetObjectHead( s3FileSystem, createObjectHeadRequest(sourcePath, options), options, bigHeadObjectResponse); try { s3FileSystem.copy(sourcePath, destinationPath); } catch (NullPointerException e) { } verify(s3FileSystem.getS3Client(), never()).copyObject((CopyObjectRequest) null); }
class S3FileSystemTest { private static S3Mock api; private static S3Client client; @BeforeClass public static void beforeClass() { api = new S3Mock.Builder().withInMemoryBackend().withPort(8002).build(); Http.ServerBinding binding = api.start(); URI endpoint = URI.create("http: S3Configuration s3Configuration = S3Configuration.builder().pathStyleAccessEnabled(true).build(); client = S3Client.builder() .region(Region.US_WEST_1) .serviceConfiguration(s3Configuration) .endpointOverride(endpoint) .credentialsProvider(AnonymousCredentialsProvider.create()) .build(); } @AfterClass public static void afterClass() { api.stop(); } @Test public void testGetScheme() { S3FileSystem s3FileSystem = new S3FileSystem(s3Options()); assertEquals("s3", s3FileSystem.getScheme()); } @Test public void testGetPathStyleAccessEnabled() throws URISyntaxException { S3FileSystem s3FileSystem = new S3FileSystem(s3OptionsWithPathStyleAccessEnabled()); URL s3Url = s3FileSystem .getS3Client() .utilities() .getUrl(GetUrlRequest.builder().bucket("bucket").key("file").build()); assertEquals("https: } @Test public void testCopy() throws IOException { testCopy(s3Options()); testCopy(s3OptionsWithSSECustomerKey()); } private HeadObjectRequest createObjectHeadRequest(S3ResourceId path, S3Options options) { return HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .sseCustomerKey(options.getSSECustomerKey().getKey()) .sseCustomerAlgorithm(options.getSSECustomerKey().getAlgorithm()) .build(); } private void assertGetObjectHead( S3FileSystem s3FileSystem, HeadObjectRequest request, S3Options options, HeadObjectResponse objectMetadata) { when(s3FileSystem.getS3Client().headObject(argThat(new GetHeadObjectRequestMatcher(request)))) .thenReturn(objectMetadata); assertEquals( getSSECustomerKeyMd5(options), s3FileSystem.getS3Client().headObject(request).sseCustomerKeyMD5()); } @Test public void testAtomicCopy() { testAtomicCopy(s3Options()); testAtomicCopy(s3OptionsWithSSECustomerKey()); 
} private void testAtomicCopy(S3Options options) { S3FileSystem s3FileSystem = buildMockedS3FileSystem(options); S3ResourceId sourcePath = S3ResourceId.fromUri("s3: S3ResourceId destinationPath = S3ResourceId.fromUri("s3: CopyObjectResponse.Builder builder = CopyObjectResponse.builder(); if (getSSECustomerKeyMd5(options) != null) { builder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } CopyObjectResponse copyObjectResponse = builder.build(); CopyObjectRequest copyObjectRequest = CopyObjectRequest.builder() .copySource(sourcePath.getBucket() + "/" + sourcePath.getKey()) .destinationBucket(destinationPath.getBucket()) .destinationBucket(destinationPath.getKey()) .sseCustomerKey(options.getSSECustomerKey().getKey()) .copySourceSSECustomerAlgorithm(options.getSSECustomerKey().getAlgorithm()) .build(); when(s3FileSystem.getS3Client().copyObject(any(CopyObjectRequest.class))) .thenReturn(copyObjectResponse); assertEquals( getSSECustomerKeyMd5(options), s3FileSystem.getS3Client().copyObject(copyObjectRequest).sseCustomerKeyMD5()); HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().build(); s3FileSystem.atomicCopy(sourcePath, destinationPath, headObjectResponse); verify(s3FileSystem.getS3Client(), times(2)).copyObject(any(CopyObjectRequest.class)); } @Test public void testMultipartCopy() { testMultipartCopy(s3Options()); testMultipartCopy(s3OptionsWithSSECustomerKey()); } private void testMultipartCopy(S3Options options) { S3FileSystem s3FileSystem = buildMockedS3FileSystem(options); S3ResourceId sourcePath = S3ResourceId.fromUri("s3: S3ResourceId destinationPath = S3ResourceId.fromUri("s3: CreateMultipartUploadResponse.Builder builder = CreateMultipartUploadResponse.builder().uploadId("upload-id"); if (getSSECustomerKeyMd5(options) != null) { builder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } CreateMultipartUploadResponse createMultipartUploadResponse = builder.build(); 
when(s3FileSystem.getS3Client().createMultipartUpload(any(CreateMultipartUploadRequest.class))) .thenReturn(createMultipartUploadResponse); assertEquals( getSSECustomerKeyMd5(options), s3FileSystem .getS3Client() .createMultipartUpload( CreateMultipartUploadRequest.builder() .bucket(destinationPath.getBucket()) .key(destinationPath.getKey()) .build()) .sseCustomerKeyMD5()); HeadObjectResponse.Builder headObjectResponseBuilder = HeadObjectResponse.builder() .contentLength((long) (options.getS3UploadBufferSizeBytes() * 1.5)) .contentEncoding("read-seek-efficient"); if (getSSECustomerKeyMd5(options) != null) { headObjectResponseBuilder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } HeadObjectResponse headObjectResponse = headObjectResponseBuilder.build(); assertGetObjectHead( s3FileSystem, createObjectHeadRequest(sourcePath, options), options, headObjectResponse); CopyPartResult copyPartResult1 = CopyPartResult.builder().eTag("etag-1").build(); CopyPartResult copyPartResult2 = CopyPartResult.builder().eTag("etag-2").build(); UploadPartCopyResponse.Builder uploadPartCopyResponseBuilder1 = UploadPartCopyResponse.builder().copyPartResult(copyPartResult1); UploadPartCopyResponse.Builder uploadPartCopyResponseBuilder2 = UploadPartCopyResponse.builder().copyPartResult(copyPartResult2); if (getSSECustomerKeyMd5(options) != null) { uploadPartCopyResponseBuilder1.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); uploadPartCopyResponseBuilder2.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } UploadPartCopyResponse uploadPartCopyResponse1 = uploadPartCopyResponseBuilder1.build(); UploadPartCopyResponse uploadPartCopyResponse2 = uploadPartCopyResponseBuilder2.build(); UploadPartCopyRequest uploadPartCopyRequest = UploadPartCopyRequest.builder() .sseCustomerKey(options.getSSECustomerKey().getKey()) .build(); when(s3FileSystem.getS3Client().uploadPartCopy(any(UploadPartCopyRequest.class))) .thenReturn(uploadPartCopyResponse1) .thenReturn(uploadPartCopyResponse2); 
assertEquals( getSSECustomerKeyMd5(options), s3FileSystem.getS3Client().uploadPartCopy(uploadPartCopyRequest).sseCustomerKeyMD5()); s3FileSystem.multipartCopy(sourcePath, destinationPath, headObjectResponse); verify(s3FileSystem.getS3Client(), times(1)) .completeMultipartUpload(any(CompleteMultipartUploadRequest.class)); } @Test public void deleteThousandsOfObjectsInMultipleBuckets() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); List<String> buckets = ImmutableList.of("bucket1", "bucket2"); List<String> keys = new ArrayList<>(); for (int i = 0; i < 2500; i++) { keys.add(String.format("key-%d", i)); } List<S3ResourceId> paths = new ArrayList<>(); for (String bucket : buckets) { for (String key : keys) { paths.add(S3ResourceId.fromComponents(bucket, key)); } } s3FileSystem.delete(paths); verify(s3FileSystem.getS3Client(), times(6)).deleteObjects(any(DeleteObjectsRequest.class)); } @Test public void matchNonGlob() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: long lastModifiedMillis = 1540000000000L; HeadObjectResponse headObjectResponse = HeadObjectResponse.builder() .contentLength(100L) .contentEncoding("read-seek-efficient") .lastModified(Instant.ofEpochMilli(lastModifiedMillis)) .build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenReturn(headObjectResponse); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setSizeBytes(100) .setLastModifiedMillis(lastModifiedMillis) .setResourceId(path) .setIsReadSeekEfficient(true) .build()))); } @Test public void matchNonGlobNotReadSeekEfficient() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: long lastModifiedMillis 
= 1540000000000L; HeadObjectResponse headObjectResponse = HeadObjectResponse.builder() .contentLength(100L) .lastModified(Instant.ofEpochMilli(lastModifiedMillis)) .contentEncoding("gzip") .build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenReturn(headObjectResponse); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setSizeBytes(100) .setLastModifiedMillis(lastModifiedMillis) .setResourceId(path) .setIsReadSeekEfficient(false) .build()))); } @Test public void matchNonGlobNullContentEncoding() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: long lastModifiedMillis = 1540000000000L; HeadObjectResponse headObjectResponse = HeadObjectResponse.builder() .contentLength(100L) .lastModified(Instant.ofEpochMilli(lastModifiedMillis)) .contentEncoding(null) .build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenReturn(headObjectResponse); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setSizeBytes(100) .setLastModifiedMillis(lastModifiedMillis) .setResourceId(path) .setIsReadSeekEfficient(true) .build()))); } @Test public void matchNonGlobNotFound() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: SdkServiceException exception = S3Exception.builder().message("mock exception").statusCode(404).build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) 
.build())))) .thenThrow(exception); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create(MatchResult.Status.NOT_FOUND, new FileNotFoundException())); } @Test public void matchNonGlobForbidden() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); SdkServiceException exception = S3Exception.builder().message("mock exception").statusCode(403).build(); S3ResourceId path = S3ResourceId.fromUri("s3: when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenThrow(exception); assertThat( s3FileSystem.matchNonGlobPath(path), MatchResultMatcher.create(MatchResult.Status.ERROR, new IOException(exception))); } static class ListObjectsV2RequestArgumentMatches implements ArgumentMatcher<ListObjectsV2Request> { private final ListObjectsV2Request expected; ListObjectsV2RequestArgumentMatches(ListObjectsV2Request expected) { this.expected = checkNotNull(expected); } @Override public boolean matches(ListObjectsV2Request argument) { if (argument != null) { return expected.bucket().equals(argument.bucket()) && expected.prefix().equals(argument.prefix()) && (expected.continuationToken() == null ? 
argument.continuationToken() == null : expected.continuationToken().equals(argument.continuationToken())); } return false; } } @Test public void matchGlob() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: ListObjectsV2Request firstRequest = ListObjectsV2Request.builder() .bucket(path.getBucket()) .prefix(path.getKeyNonWildcardPrefix()) .continuationToken(null) .build(); S3Object firstMatch = S3Object.builder() .key("foo/bar0baz") .size(100L) .lastModified(Instant.ofEpochMilli(1540000000001L)) .build(); S3Object secondMatch = S3Object.builder() .key("foo/bar1qux") .size(200L) .lastModified(Instant.ofEpochMilli(1540000000002L)) .build(); ListObjectsV2Response firstResponse = ListObjectsV2Response.builder() .nextContinuationToken("token") .contents(firstMatch, secondMatch) .build(); when(s3FileSystem .getS3Client() .listObjectsV2(argThat(new ListObjectsV2RequestArgumentMatches(firstRequest)))) .thenReturn(firstResponse); ListObjectsV2Request secondRequest = ListObjectsV2Request.builder() .bucket(path.getBucket()) .prefix(path.getKeyNonWildcardPrefix()) .continuationToken("token") .build(); S3Object thirdMatch = S3Object.builder() .key("foo/bar2baz") .size(300L) .lastModified(Instant.ofEpochMilli(1540000000003L)) .build(); ListObjectsV2Response secondResponse = ListObjectsV2Response.builder().nextContinuationToken(null).contents(thirdMatch).build(); when(s3FileSystem .getS3Client() .listObjectsV2(argThat(new ListObjectsV2RequestArgumentMatches(secondRequest)))) .thenReturn(secondResponse); HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().contentEncoding("").build(); when(s3FileSystem.getS3Client().headObject(any(HeadObjectRequest.class))) .thenReturn(headObjectResponse); assertThat( s3FileSystem.matchGlobPaths(ImmutableList.of(path)).get(0), MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setIsReadSeekEfficient(true) 
.setResourceId(S3ResourceId.fromComponents(path.getBucket(), firstMatch.key())) .setSizeBytes(firstMatch.size()) .setLastModifiedMillis(firstMatch.lastModified().toEpochMilli()) .build(), MatchResult.Metadata.builder() .setIsReadSeekEfficient(true) .setResourceId(S3ResourceId.fromComponents(path.getBucket(), thirdMatch.key())) .setSizeBytes(thirdMatch.size()) .setLastModifiedMillis(thirdMatch.lastModified().toEpochMilli()) .build()))); } @Test public void matchGlobWithSlashes() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: ListObjectsV2Request request = ListObjectsV2Request.builder() .bucket(path.getBucket()) .prefix(path.getKeyNonWildcardPrefix()) .continuationToken(null) .build(); S3Object firstMatch = S3Object.builder() .key("foo/bar\\baz0") .size(100L) .lastModified(Instant.ofEpochMilli(1540000000001L)) .build(); S3Object secondMatch = S3Object.builder() .key("foo/bar/baz1") .size(200L) .lastModified(Instant.ofEpochMilli(1540000000002L)) .build(); ListObjectsV2Response response = ListObjectsV2Response.builder().contents(firstMatch, secondMatch).build(); when(s3FileSystem .getS3Client() .listObjectsV2(argThat(new ListObjectsV2RequestArgumentMatches(request)))) .thenReturn(response); HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().contentEncoding("").build(); when(s3FileSystem.getS3Client().headObject(any(HeadObjectRequest.class))) .thenReturn(headObjectResponse); assertThat( s3FileSystem.matchGlobPaths(ImmutableList.of(path)).get(0), MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setIsReadSeekEfficient(true) .setResourceId(S3ResourceId.fromComponents(path.getBucket(), firstMatch.key())) .setSizeBytes(firstMatch.size()) .setLastModifiedMillis(firstMatch.lastModified().toEpochMilli()) .build()))); } @Test public void matchVariousInvokeThreadPool() throws IOException { S3FileSystem s3FileSystem = 
buildMockedS3FileSystem(s3Options()); SdkServiceException notFoundException = S3Exception.builder().message("mock exception").statusCode(404).build(); S3ResourceId pathNotExist = S3ResourceId.fromUri("s3: HeadObjectRequest headObjectRequestNotExist = HeadObjectRequest.builder() .bucket(pathNotExist.getBucket()) .key(pathNotExist.getKey()) .build(); when(s3FileSystem .getS3Client() .headObject(argThat(new GetHeadObjectRequestMatcher(headObjectRequestNotExist)))) .thenThrow(notFoundException); SdkServiceException forbiddenException = SdkServiceException.builder().message("mock exception").statusCode(403).build(); S3ResourceId pathForbidden = S3ResourceId.fromUri("s3: HeadObjectRequest headObjectRequestForbidden = HeadObjectRequest.builder() .bucket(pathForbidden.getBucket()) .key(pathForbidden.getKey()) .build(); when(s3FileSystem .getS3Client() .headObject(argThat(new GetHeadObjectRequestMatcher(headObjectRequestForbidden)))) .thenThrow(forbiddenException); S3ResourceId pathExist = S3ResourceId.fromUri("s3: HeadObjectRequest headObjectRequestExist = HeadObjectRequest.builder().bucket(pathExist.getBucket()).key(pathExist.getKey()).build(); HeadObjectResponse s3ObjectMetadata = HeadObjectResponse.builder() .contentLength(100L) .contentEncoding("not-gzip") .lastModified(Instant.ofEpochMilli(1540000000000L)) .build(); when(s3FileSystem .getS3Client() .headObject(argThat(new GetHeadObjectRequestMatcher(headObjectRequestExist)))) .thenReturn(s3ObjectMetadata); S3ResourceId pathGlob = S3ResourceId.fromUri("s3: S3Object foundListObject = S3Object.builder() .key("path/part-0") .size(200L) .lastModified(Instant.ofEpochMilli(1541000000000L)) .build(); ListObjectsV2Response listObjectsResponse = ListObjectsV2Response.builder().continuationToken(null).contents(foundListObject).build(); when(s3FileSystem.getS3Client().listObjectsV2((ListObjectsV2Request) notNull())) .thenReturn(listObjectsResponse); HeadObjectResponse headObjectResponse = 
HeadObjectResponse.builder().contentEncoding("").build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(pathGlob.getBucket()) .key("path/part-0") .build())))) .thenReturn(headObjectResponse); assertThat( s3FileSystem.match( ImmutableList.of( pathNotExist.toString(), pathForbidden.toString(), pathExist.toString(), pathGlob.toString())), contains( MatchResultMatcher.create(MatchResult.Status.NOT_FOUND, new FileNotFoundException()), MatchResultMatcher.create( MatchResult.Status.ERROR, new IOException(forbiddenException)), MatchResultMatcher.create(100, 1540000000000L, pathExist, true), MatchResultMatcher.create( 200, 1541000000000L, S3ResourceId.fromComponents(pathGlob.getBucket(), foundListObject.key()), true))); } @Test public void testWriteAndRead() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options(), client); client.createBucket(CreateBucketRequest.builder().bucket("testbucket").build()); byte[] writtenArray = new byte[] {0}; ByteBuffer bb = ByteBuffer.allocate(writtenArray.length); bb.put(writtenArray); S3ResourceId path = S3ResourceId.fromUri("s3: WritableByteChannel writableByteChannel = s3FileSystem.create( path, CreateOptions.StandardCreateOptions.builder().setMimeType("application/text").build()); writableByteChannel.write(bb); writableByteChannel.close(); ByteBuffer bb2 = ByteBuffer.allocate(writtenArray.length); ReadableByteChannel open = s3FileSystem.open(path); open.read(bb2); byte[] readArray = bb2.array(); assertArrayEquals(readArray, writtenArray); open.close(); } /** A mockito argument matcher to implement equality on GetHeadObjectRequest. 
*/ private static class GetHeadObjectRequestMatcher implements ArgumentMatcher<HeadObjectRequest> { private final HeadObjectRequest expected; GetHeadObjectRequestMatcher(HeadObjectRequest expected) { this.expected = expected; } @Override public boolean matches(HeadObjectRequest obj) { if (obj == null) { return false; } return obj.bucket().equals(expected.bucket()) && obj.key().equals(expected.key()); } } }
class S3FileSystemTest { private static S3Mock api; private static S3Client client; @BeforeClass public static void beforeClass() { api = new S3Mock.Builder().withInMemoryBackend().withPort(8002).build(); Http.ServerBinding binding = api.start(); URI endpoint = URI.create("http: S3Configuration s3Configuration = S3Configuration.builder().pathStyleAccessEnabled(true).build(); client = S3Client.builder() .region(Region.US_WEST_1) .serviceConfiguration(s3Configuration) .endpointOverride(endpoint) .credentialsProvider(AnonymousCredentialsProvider.create()) .build(); } @AfterClass public static void afterClass() { api.stop(); } @Test public void testGetScheme() { S3FileSystem s3FileSystem = new S3FileSystem(s3Options()); assertEquals("s3", s3FileSystem.getScheme()); } @Test public void testGetPathStyleAccessEnabled() throws URISyntaxException { S3FileSystem s3FileSystem = new S3FileSystem(s3OptionsWithPathStyleAccessEnabled()); URL s3Url = s3FileSystem .getS3Client() .utilities() .getUrl(GetUrlRequest.builder().bucket("bucket").key("file").build()); assertEquals("https: } @Test public void testCopy() throws IOException { testCopy(s3Options()); testCopy(s3OptionsWithSSECustomerKey()); } private HeadObjectRequest createObjectHeadRequest(S3ResourceId path, S3Options options) { return HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .sseCustomerKey(options.getSSECustomerKey().getKey()) .sseCustomerAlgorithm(options.getSSECustomerKey().getAlgorithm()) .build(); } private void assertGetObjectHead( S3FileSystem s3FileSystem, HeadObjectRequest request, S3Options options, HeadObjectResponse objectMetadata) { when(s3FileSystem.getS3Client().headObject(argThat(new GetHeadObjectRequestMatcher(request)))) .thenReturn(objectMetadata); assertEquals( getSSECustomerKeyMd5(options), s3FileSystem.getS3Client().headObject(request).sseCustomerKeyMD5()); } @Test public void testAtomicCopy() { testAtomicCopy(s3Options()); testAtomicCopy(s3OptionsWithSSECustomerKey()); 
} private void testAtomicCopy(S3Options options) { S3FileSystem s3FileSystem = buildMockedS3FileSystem(options); S3ResourceId sourcePath = S3ResourceId.fromUri("s3: S3ResourceId destinationPath = S3ResourceId.fromUri("s3: CopyObjectResponse.Builder builder = CopyObjectResponse.builder(); if (getSSECustomerKeyMd5(options) != null) { builder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } CopyObjectResponse copyObjectResponse = builder.build(); CopyObjectRequest copyObjectRequest = CopyObjectRequest.builder() .copySource(sourcePath.getBucket() + "/" + sourcePath.getKey()) .destinationBucket(destinationPath.getBucket()) .destinationBucket(destinationPath.getKey()) .sseCustomerKey(options.getSSECustomerKey().getKey()) .copySourceSSECustomerAlgorithm(options.getSSECustomerKey().getAlgorithm()) .build(); when(s3FileSystem.getS3Client().copyObject(any(CopyObjectRequest.class))) .thenReturn(copyObjectResponse); assertEquals( getSSECustomerKeyMd5(options), s3FileSystem.getS3Client().copyObject(copyObjectRequest).sseCustomerKeyMD5()); HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().build(); s3FileSystem.atomicCopy(sourcePath, destinationPath, headObjectResponse); verify(s3FileSystem.getS3Client(), times(2)).copyObject(any(CopyObjectRequest.class)); } @Test public void testMultipartCopy() { testMultipartCopy(s3Options()); testMultipartCopy(s3OptionsWithSSECustomerKey()); } private void testMultipartCopy(S3Options options) { S3FileSystem s3FileSystem = buildMockedS3FileSystem(options); S3ResourceId sourcePath = S3ResourceId.fromUri("s3: S3ResourceId destinationPath = S3ResourceId.fromUri("s3: CreateMultipartUploadResponse.Builder builder = CreateMultipartUploadResponse.builder().uploadId("upload-id"); if (getSSECustomerKeyMd5(options) != null) { builder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } CreateMultipartUploadResponse createMultipartUploadResponse = builder.build(); 
when(s3FileSystem.getS3Client().createMultipartUpload(any(CreateMultipartUploadRequest.class))) .thenReturn(createMultipartUploadResponse); assertEquals( getSSECustomerKeyMd5(options), s3FileSystem .getS3Client() .createMultipartUpload( CreateMultipartUploadRequest.builder() .bucket(destinationPath.getBucket()) .key(destinationPath.getKey()) .build()) .sseCustomerKeyMD5()); HeadObjectResponse.Builder headObjectResponseBuilder = HeadObjectResponse.builder() .contentLength((long) (options.getS3UploadBufferSizeBytes() * 1.5)) .contentEncoding("read-seek-efficient"); if (getSSECustomerKeyMd5(options) != null) { headObjectResponseBuilder.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } HeadObjectResponse headObjectResponse = headObjectResponseBuilder.build(); assertGetObjectHead( s3FileSystem, createObjectHeadRequest(sourcePath, options), options, headObjectResponse); CopyPartResult copyPartResult1 = CopyPartResult.builder().eTag("etag-1").build(); CopyPartResult copyPartResult2 = CopyPartResult.builder().eTag("etag-2").build(); UploadPartCopyResponse.Builder uploadPartCopyResponseBuilder1 = UploadPartCopyResponse.builder().copyPartResult(copyPartResult1); UploadPartCopyResponse.Builder uploadPartCopyResponseBuilder2 = UploadPartCopyResponse.builder().copyPartResult(copyPartResult2); if (getSSECustomerKeyMd5(options) != null) { uploadPartCopyResponseBuilder1.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); uploadPartCopyResponseBuilder2.sseCustomerKeyMD5(getSSECustomerKeyMd5(options)); } UploadPartCopyResponse uploadPartCopyResponse1 = uploadPartCopyResponseBuilder1.build(); UploadPartCopyResponse uploadPartCopyResponse2 = uploadPartCopyResponseBuilder2.build(); UploadPartCopyRequest uploadPartCopyRequest = UploadPartCopyRequest.builder() .sseCustomerKey(options.getSSECustomerKey().getKey()) .build(); when(s3FileSystem.getS3Client().uploadPartCopy(any(UploadPartCopyRequest.class))) .thenReturn(uploadPartCopyResponse1) .thenReturn(uploadPartCopyResponse2); 
assertEquals( getSSECustomerKeyMd5(options), s3FileSystem.getS3Client().uploadPartCopy(uploadPartCopyRequest).sseCustomerKeyMD5()); s3FileSystem.multipartCopy(sourcePath, destinationPath, headObjectResponse); verify(s3FileSystem.getS3Client(), times(1)) .completeMultipartUpload(any(CompleteMultipartUploadRequest.class)); } @Test public void deleteThousandsOfObjectsInMultipleBuckets() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); List<String> buckets = ImmutableList.of("bucket1", "bucket2"); List<String> keys = new ArrayList<>(); for (int i = 0; i < 2500; i++) { keys.add(String.format("key-%d", i)); } List<S3ResourceId> paths = new ArrayList<>(); for (String bucket : buckets) { for (String key : keys) { paths.add(S3ResourceId.fromComponents(bucket, key)); } } s3FileSystem.delete(paths); verify(s3FileSystem.getS3Client(), times(6)).deleteObjects(any(DeleteObjectsRequest.class)); } @Test public void matchNonGlob() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: long lastModifiedMillis = 1540000000000L; HeadObjectResponse headObjectResponse = HeadObjectResponse.builder() .contentLength(100L) .contentEncoding("read-seek-efficient") .lastModified(Instant.ofEpochMilli(lastModifiedMillis)) .build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenReturn(headObjectResponse); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setSizeBytes(100) .setLastModifiedMillis(lastModifiedMillis) .setResourceId(path) .setIsReadSeekEfficient(true) .build()))); } @Test public void matchNonGlobNotReadSeekEfficient() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: long lastModifiedMillis 
= 1540000000000L; HeadObjectResponse headObjectResponse = HeadObjectResponse.builder() .contentLength(100L) .lastModified(Instant.ofEpochMilli(lastModifiedMillis)) .contentEncoding("gzip") .build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenReturn(headObjectResponse); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setSizeBytes(100) .setLastModifiedMillis(lastModifiedMillis) .setResourceId(path) .setIsReadSeekEfficient(false) .build()))); } @Test public void matchNonGlobNullContentEncoding() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: long lastModifiedMillis = 1540000000000L; HeadObjectResponse headObjectResponse = HeadObjectResponse.builder() .contentLength(100L) .lastModified(Instant.ofEpochMilli(lastModifiedMillis)) .contentEncoding(null) .build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenReturn(headObjectResponse); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setSizeBytes(100) .setLastModifiedMillis(lastModifiedMillis) .setResourceId(path) .setIsReadSeekEfficient(true) .build()))); } @Test public void matchNonGlobNotFound() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: SdkServiceException exception = S3Exception.builder().message("mock exception").statusCode(404).build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) 
.build())))) .thenThrow(exception); MatchResult result = s3FileSystem.matchNonGlobPath(path); assertThat( result, MatchResultMatcher.create(MatchResult.Status.NOT_FOUND, new FileNotFoundException())); } @Test public void matchNonGlobForbidden() { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); SdkServiceException exception = S3Exception.builder().message("mock exception").statusCode(403).build(); S3ResourceId path = S3ResourceId.fromUri("s3: when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(path.getBucket()) .key(path.getKey()) .build())))) .thenThrow(exception); assertThat( s3FileSystem.matchNonGlobPath(path), MatchResultMatcher.create(MatchResult.Status.ERROR, new IOException(exception))); } static class ListObjectsV2RequestArgumentMatches implements ArgumentMatcher<ListObjectsV2Request> { private final ListObjectsV2Request expected; ListObjectsV2RequestArgumentMatches(ListObjectsV2Request expected) { this.expected = checkNotNull(expected); } @Override public boolean matches(ListObjectsV2Request argument) { if (argument != null) { return expected.bucket().equals(argument.bucket()) && expected.prefix().equals(argument.prefix()) && (expected.continuationToken() == null ? 
argument.continuationToken() == null : expected.continuationToken().equals(argument.continuationToken())); } return false; } } @Test public void matchGlob() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: ListObjectsV2Request firstRequest = ListObjectsV2Request.builder() .bucket(path.getBucket()) .prefix(path.getKeyNonWildcardPrefix()) .continuationToken(null) .build(); S3Object firstMatch = S3Object.builder() .key("foo/bar0baz") .size(100L) .lastModified(Instant.ofEpochMilli(1540000000001L)) .build(); S3Object secondMatch = S3Object.builder() .key("foo/bar1qux") .size(200L) .lastModified(Instant.ofEpochMilli(1540000000002L)) .build(); ListObjectsV2Response firstResponse = ListObjectsV2Response.builder() .nextContinuationToken("token") .contents(firstMatch, secondMatch) .build(); when(s3FileSystem .getS3Client() .listObjectsV2(argThat(new ListObjectsV2RequestArgumentMatches(firstRequest)))) .thenReturn(firstResponse); ListObjectsV2Request secondRequest = ListObjectsV2Request.builder() .bucket(path.getBucket()) .prefix(path.getKeyNonWildcardPrefix()) .continuationToken("token") .build(); S3Object thirdMatch = S3Object.builder() .key("foo/bar2baz") .size(300L) .lastModified(Instant.ofEpochMilli(1540000000003L)) .build(); ListObjectsV2Response secondResponse = ListObjectsV2Response.builder().nextContinuationToken(null).contents(thirdMatch).build(); when(s3FileSystem .getS3Client() .listObjectsV2(argThat(new ListObjectsV2RequestArgumentMatches(secondRequest)))) .thenReturn(secondResponse); HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().contentEncoding("").build(); when(s3FileSystem.getS3Client().headObject(any(HeadObjectRequest.class))) .thenReturn(headObjectResponse); assertThat( s3FileSystem.matchGlobPaths(ImmutableList.of(path)).get(0), MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setIsReadSeekEfficient(true) 
.setResourceId(S3ResourceId.fromComponents(path.getBucket(), firstMatch.key())) .setSizeBytes(firstMatch.size()) .setLastModifiedMillis(firstMatch.lastModified().toEpochMilli()) .build(), MatchResult.Metadata.builder() .setIsReadSeekEfficient(true) .setResourceId(S3ResourceId.fromComponents(path.getBucket(), thirdMatch.key())) .setSizeBytes(thirdMatch.size()) .setLastModifiedMillis(thirdMatch.lastModified().toEpochMilli()) .build()))); } @Test public void matchGlobWithSlashes() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options()); S3ResourceId path = S3ResourceId.fromUri("s3: ListObjectsV2Request request = ListObjectsV2Request.builder() .bucket(path.getBucket()) .prefix(path.getKeyNonWildcardPrefix()) .continuationToken(null) .build(); S3Object firstMatch = S3Object.builder() .key("foo/bar\\baz0") .size(100L) .lastModified(Instant.ofEpochMilli(1540000000001L)) .build(); S3Object secondMatch = S3Object.builder() .key("foo/bar/baz1") .size(200L) .lastModified(Instant.ofEpochMilli(1540000000002L)) .build(); ListObjectsV2Response response = ListObjectsV2Response.builder().contents(firstMatch, secondMatch).build(); when(s3FileSystem .getS3Client() .listObjectsV2(argThat(new ListObjectsV2RequestArgumentMatches(request)))) .thenReturn(response); HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().contentEncoding("").build(); when(s3FileSystem.getS3Client().headObject(any(HeadObjectRequest.class))) .thenReturn(headObjectResponse); assertThat( s3FileSystem.matchGlobPaths(ImmutableList.of(path)).get(0), MatchResultMatcher.create( ImmutableList.of( MatchResult.Metadata.builder() .setIsReadSeekEfficient(true) .setResourceId(S3ResourceId.fromComponents(path.getBucket(), firstMatch.key())) .setSizeBytes(firstMatch.size()) .setLastModifiedMillis(firstMatch.lastModified().toEpochMilli()) .build()))); } @Test public void matchVariousInvokeThreadPool() throws IOException { S3FileSystem s3FileSystem = 
buildMockedS3FileSystem(s3Options()); SdkServiceException notFoundException = S3Exception.builder().message("mock exception").statusCode(404).build(); S3ResourceId pathNotExist = S3ResourceId.fromUri("s3: HeadObjectRequest headObjectRequestNotExist = HeadObjectRequest.builder() .bucket(pathNotExist.getBucket()) .key(pathNotExist.getKey()) .build(); when(s3FileSystem .getS3Client() .headObject(argThat(new GetHeadObjectRequestMatcher(headObjectRequestNotExist)))) .thenThrow(notFoundException); SdkServiceException forbiddenException = SdkServiceException.builder().message("mock exception").statusCode(403).build(); S3ResourceId pathForbidden = S3ResourceId.fromUri("s3: HeadObjectRequest headObjectRequestForbidden = HeadObjectRequest.builder() .bucket(pathForbidden.getBucket()) .key(pathForbidden.getKey()) .build(); when(s3FileSystem .getS3Client() .headObject(argThat(new GetHeadObjectRequestMatcher(headObjectRequestForbidden)))) .thenThrow(forbiddenException); S3ResourceId pathExist = S3ResourceId.fromUri("s3: HeadObjectRequest headObjectRequestExist = HeadObjectRequest.builder().bucket(pathExist.getBucket()).key(pathExist.getKey()).build(); HeadObjectResponse s3ObjectMetadata = HeadObjectResponse.builder() .contentLength(100L) .contentEncoding("not-gzip") .lastModified(Instant.ofEpochMilli(1540000000000L)) .build(); when(s3FileSystem .getS3Client() .headObject(argThat(new GetHeadObjectRequestMatcher(headObjectRequestExist)))) .thenReturn(s3ObjectMetadata); S3ResourceId pathGlob = S3ResourceId.fromUri("s3: S3Object foundListObject = S3Object.builder() .key("path/part-0") .size(200L) .lastModified(Instant.ofEpochMilli(1541000000000L)) .build(); ListObjectsV2Response listObjectsResponse = ListObjectsV2Response.builder().continuationToken(null).contents(foundListObject).build(); when(s3FileSystem.getS3Client().listObjectsV2((ListObjectsV2Request) notNull())) .thenReturn(listObjectsResponse); HeadObjectResponse headObjectResponse = 
HeadObjectResponse.builder().contentEncoding("").build(); when(s3FileSystem .getS3Client() .headObject( argThat( new GetHeadObjectRequestMatcher( HeadObjectRequest.builder() .bucket(pathGlob.getBucket()) .key("path/part-0") .build())))) .thenReturn(headObjectResponse); assertThat( s3FileSystem.match( ImmutableList.of( pathNotExist.toString(), pathForbidden.toString(), pathExist.toString(), pathGlob.toString())), contains( MatchResultMatcher.create(MatchResult.Status.NOT_FOUND, new FileNotFoundException()), MatchResultMatcher.create( MatchResult.Status.ERROR, new IOException(forbiddenException)), MatchResultMatcher.create(100, 1540000000000L, pathExist, true), MatchResultMatcher.create( 200, 1541000000000L, S3ResourceId.fromComponents(pathGlob.getBucket(), foundListObject.key()), true))); } @Test public void testWriteAndRead() throws IOException { S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Options(), client); client.createBucket(CreateBucketRequest.builder().bucket("testbucket").build()); byte[] writtenArray = new byte[] {0}; ByteBuffer bb = ByteBuffer.allocate(writtenArray.length); bb.put(writtenArray); S3ResourceId path = S3ResourceId.fromUri("s3: WritableByteChannel writableByteChannel = s3FileSystem.create( path, CreateOptions.StandardCreateOptions.builder().setMimeType("application/text").build()); writableByteChannel.write(bb); writableByteChannel.close(); ByteBuffer bb2 = ByteBuffer.allocate(writtenArray.length); ReadableByteChannel open = s3FileSystem.open(path); open.read(bb2); byte[] readArray = bb2.array(); assertArrayEquals(readArray, writtenArray); open.close(); } /** A mockito argument matcher to implement equality on GetHeadObjectRequest. 
*/ private static class GetHeadObjectRequestMatcher implements ArgumentMatcher<HeadObjectRequest> { private final HeadObjectRequest expected; GetHeadObjectRequestMatcher(HeadObjectRequest expected) { this.expected = expected; } @Override public boolean matches(HeadObjectRequest obj) { if (obj == null) { return false; } return obj.bucket().equals(expected.bucket()) && obj.key().equals(expected.key()); } } }
This TODO can be solved now
protected Mono<NetworkInterfaceInner> getInnerAsync() { return this .manager() .inner() .networkInterfaces() .getByResourceGroupAsync(this.resourceGroupName(), this.name(), null); }
protected Mono<NetworkInterfaceInner> getInnerAsync() { return this .manager() .inner() .networkInterfaces() .getByResourceGroupAsync(this.resourceGroupName(), this.name()); }
class NetworkInterfaceImpl extends GroupableParentResourceWithTagsImpl< NetworkInterface, NetworkInterfaceInner, NetworkInterfaceImpl, NetworkManager> implements NetworkInterface, NetworkInterface.Definition, NetworkInterface.Update { /** the name of the network interface. */ private final String nicName; /** used to generate unique name for any dependency resources. */ protected final ResourceNamer namer; /** references to all ip configuration. */ private Map<String, NicIPConfiguration> nicIPConfigurations; /** unique key of a creatable network security group to be associated with the network interface. */ private String creatableNetworkSecurityGroupKey; /** reference to an network security group to be associated with the network interface. */ private NetworkSecurityGroup existingNetworkSecurityGroupToAssociate; /** cached related resources. */ private NetworkSecurityGroup networkSecurityGroup; NetworkInterfaceImpl(String name, NetworkInterfaceInner innerModel, final NetworkManager networkManager) { super(name, innerModel, networkManager); this.nicName = name; this.namer = this.manager().getSdkContext().getResourceNamerFactory().createResourceNamer(this.nicName); initializeChildrenFromInner(); } @Override public Mono<NetworkInterface> refreshAsync() { return super .refreshAsync() .map( networkInterface -> { NetworkInterfaceImpl impl = (NetworkInterfaceImpl) networkInterface; impl.clearCachedRelatedResources(); impl.initializeChildrenFromInner(); return impl; }); } @Override @Override protected Mono<NetworkInterfaceInner> applyTagsToInnerAsync() { return this .manager() .inner() .networkInterfaces() .updateTagsAsync(resourceGroupName(), name(), inner().getTags()); } @Override public NetworkInterfaceImpl withAcceleratedNetworking() { this.inner().withEnableAcceleratedNetworking(true); return this; } @Override public NetworkInterfaceImpl withoutAcceleratedNetworking() { this.inner().withEnableAcceleratedNetworking(false); return this; } @Override public 
NetworkInterfaceImpl withNewPrimaryNetwork(Creatable<Network> creatable) { this.primaryIPConfiguration().withNewNetwork(creatable); return this; } @Override public NetworkInterfaceImpl withNewPrimaryNetwork(String name, String addressSpaceCidr) { this.primaryIPConfiguration().withNewNetwork(name, addressSpaceCidr); return this; } @Override public NetworkInterfaceImpl withNewPrimaryNetwork(String addressSpaceCidr) { this.primaryIPConfiguration().withNewNetwork(addressSpaceCidr); return this; } @Override public NetworkInterfaceImpl withExistingPrimaryNetwork(Network network) { this.primaryIPConfiguration().withExistingNetwork(network); return this; } @Override public NetworkInterfaceImpl withNewPrimaryPublicIPAddress(Creatable<PublicIPAddress> creatable) { this.primaryIPConfiguration().withNewPublicIPAddress(creatable); return this; } @Override public NetworkInterfaceImpl withNewPrimaryPublicIPAddress() { this.primaryIPConfiguration().withNewPublicIPAddress(); return this; } @Override public NetworkInterfaceImpl withNewPrimaryPublicIPAddress(String leafDnsLabel) { this.primaryIPConfiguration().withNewPublicIPAddress(leafDnsLabel); return this; } @Override public NetworkInterfaceImpl withExistingLoadBalancerBackend(LoadBalancer loadBalancer, String backendName) { this.primaryIPConfiguration().withExistingLoadBalancerBackend(loadBalancer, backendName); return this; } @Override public NetworkInterfaceImpl withExistingLoadBalancerInboundNatRule( LoadBalancer loadBalancer, String inboundNatRuleName) { this.primaryIPConfiguration().withExistingLoadBalancerInboundNatRule(loadBalancer, inboundNatRuleName); return this; } @Override public Update withoutLoadBalancerBackends() { for (NicIPConfiguration ipConfig : this.ipConfigurations().values()) { this.updateIPConfiguration(ipConfig.name()).withoutLoadBalancerBackends(); } return this; } @Override public Update withoutLoadBalancerInboundNatRules() { for (NicIPConfiguration ipConfig : this.ipConfigurations().values()) { 
this.updateIPConfiguration(ipConfig.name()).withoutLoadBalancerInboundNatRules(); } return this; } @Override public NetworkInterfaceImpl withoutPrimaryPublicIPAddress() { this.primaryIPConfiguration().withoutPublicIPAddress(); return this; } @Override public NetworkInterfaceImpl withExistingPrimaryPublicIPAddress(PublicIPAddress publicIPAddress) { this.primaryIPConfiguration().withExistingPublicIPAddress(publicIPAddress); return this; } @Override public NetworkInterfaceImpl withPrimaryPrivateIPAddressDynamic() { this.primaryIPConfiguration().withPrivateIPAddressDynamic(); return this; } @Override public NetworkInterfaceImpl withPrimaryPrivateIPAddressStatic(String staticPrivateIPAddress) { this.primaryIPConfiguration().withPrivateIPAddressStatic(staticPrivateIPAddress); return this; } @Override public NetworkInterfaceImpl withNewNetworkSecurityGroup(Creatable<NetworkSecurityGroup> creatable) { if (this.creatableNetworkSecurityGroupKey == null) { this.creatableNetworkSecurityGroupKey = this.addDependency(creatable); } return this; } @Override public NetworkInterfaceImpl withExistingNetworkSecurityGroup(NetworkSecurityGroup networkSecurityGroup) { this.existingNetworkSecurityGroupToAssociate = networkSecurityGroup; return this; } @Override public NetworkInterfaceImpl withoutNetworkSecurityGroup() { this.inner().withNetworkSecurityGroup(null); return this; } @Override public NicIPConfigurationImpl defineSecondaryIPConfiguration(String name) { return prepareNewNicIPConfiguration(name); } @Override public NicIPConfigurationImpl updateIPConfiguration(String name) { return (NicIPConfigurationImpl) this.nicIPConfigurations.get(name); } @Override public NetworkInterfaceImpl withIPForwarding() { this.inner().withEnableIPForwarding(true); return this; } @Override public NetworkInterfaceImpl withoutIPConfiguration(String name) { this.nicIPConfigurations.remove(name); return this; } @Override public NetworkInterfaceImpl withoutIPForwarding() { 
this.inner().withEnableIPForwarding(false); return this; } @Override public NetworkInterfaceImpl withDnsServer(String ipAddress) { this.dnsServerIPs().add(ipAddress); return this; } @Override public NetworkInterfaceImpl withoutDnsServer(String ipAddress) { this.dnsServerIPs().remove(ipAddress); return this; } @Override public NetworkInterfaceImpl withAzureDnsServer() { this.dnsServerIPs().clear(); return this; } @Override public NetworkInterfaceImpl withSubnet(String name) { this.primaryIPConfiguration().withSubnet(name); return this; } @Override public NetworkInterfaceImpl withInternalDnsNameLabel(String dnsNameLabel) { this.inner().dnsSettings().withInternalDnsNameLabel(dnsNameLabel); return this; } @Override public boolean isAcceleratedNetworkingEnabled() { return Utils.toPrimitiveBoolean(this.inner().enableAcceleratedNetworking()); } @Override public String virtualMachineId() { if (this.inner().virtualMachine() != null) { return this.inner().virtualMachine().getId(); } else { return null; } } @Override public boolean isIPForwardingEnabled() { return Utils.toPrimitiveBoolean(this.inner().enableIPForwarding()); } @Override public String macAddress() { return this.inner().macAddress(); } @Override public String internalDnsNameLabel() { return (this.inner().dnsSettings() != null) ? this.inner().dnsSettings().internalDnsNameLabel() : null; } @Override public String internalDomainNameSuffix() { return (this.inner().dnsSettings() != null) ? 
this.inner().dnsSettings().internalDomainNameSuffix() : null; } @Override public List<String> appliedDnsServers() { List<String> dnsServers = new ArrayList<String>(); if (this.inner().dnsSettings() == null) { return Collections.unmodifiableList(dnsServers); } else if (this.inner().dnsSettings().appliedDnsServers() == null) { return Collections.unmodifiableList(dnsServers); } else { return Collections.unmodifiableList(this.inner().dnsSettings().appliedDnsServers()); } } @Override public String internalFqdn() { return (this.inner().dnsSettings() != null) ? this.inner().dnsSettings().internalFqdn() : null; } @Override public List<String> dnsServers() { return this.dnsServerIPs(); } @Override public String primaryPrivateIP() { return this.primaryIPConfiguration().privateIPAddress(); } @Override public IPAllocationMethod primaryPrivateIPAllocationMethod() { return this.primaryIPConfiguration().privateIPAllocationMethod(); } @Override public Map<String, NicIPConfiguration> ipConfigurations() { return Collections.unmodifiableMap(this.nicIPConfigurations); } @Override public String networkSecurityGroupId() { return (this.inner().networkSecurityGroup() != null) ? 
this.inner().networkSecurityGroup().getId() : null; } @Override public NetworkSecurityGroup getNetworkSecurityGroup() { if (this.networkSecurityGroup == null && this.networkSecurityGroupId() != null) { String id = this.networkSecurityGroupId(); this.networkSecurityGroup = super .myManager .networkSecurityGroups() .getByResourceGroup(ResourceUtils.groupFromResourceId(id), ResourceUtils.nameFromResourceId(id)); } return this.networkSecurityGroup; } /** @return the primary IP configuration of the network interface */ @Override public NicIPConfigurationImpl primaryIPConfiguration() { NicIPConfigurationImpl primaryIPConfig = null; if (this.nicIPConfigurations.size() == 0) { primaryIPConfig = prepareNewNicIPConfiguration("primary"); primaryIPConfig.inner().withPrimary(true); withIPConfiguration(primaryIPConfig); } else if (this.nicIPConfigurations.size() == 1) { primaryIPConfig = (NicIPConfigurationImpl) this.nicIPConfigurations.values().iterator().next(); } else { for (NicIPConfiguration ipConfig : this.nicIPConfigurations.values()) { if (ipConfig.isPrimary()) { primaryIPConfig = (NicIPConfigurationImpl) ipConfig; break; } } } return primaryIPConfig; } /** @return the list of DNS server IPs from the DNS settings */ private List<String> dnsServerIPs() { List<String> dnsServers = new ArrayList<String>(); if (this.inner().dnsSettings() == null) { return dnsServers; } else if (this.inner().dnsSettings().dnsServers() == null) { return dnsServers; } else { return this.inner().dnsSettings().dnsServers(); } } @Override protected void initializeChildrenFromInner() { this.nicIPConfigurations = new TreeMap<>(); List<NetworkInterfaceIPConfigurationInner> inners = this.inner().ipConfigurations(); if (inners != null) { for (NetworkInterfaceIPConfigurationInner inner : inners) { NicIPConfigurationImpl nicIPConfiguration = new NicIPConfigurationImpl(inner, this, super.myManager, false); this.nicIPConfigurations.put(nicIPConfiguration.name(), nicIPConfiguration); } } } /** * Gets a new 
IP configuration child resource {@link NicIPConfiguration} wrapping {@link * NetworkInterfaceIPConfigurationInner}. * * @param name the name for the new ip configuration * @return {@link NicIPConfiguration} */ private NicIPConfigurationImpl prepareNewNicIPConfiguration(String name) { NicIPConfigurationImpl nicIPConfiguration = NicIPConfigurationImpl.prepareNicIPConfiguration(name, this, super.myManager); return nicIPConfiguration; } private void clearCachedRelatedResources() { this.networkSecurityGroup = null; } NetworkInterfaceImpl withIPConfiguration(NicIPConfigurationImpl nicIPConfiguration) { this.nicIPConfigurations.put(nicIPConfiguration.name(), nicIPConfiguration); return this; } void addToCreatableDependencies(Creatable<? extends Resource> creatableResource) { this.addDependency(creatableResource); } Resource createdDependencyResource(String key) { return this.<Resource>taskResult(key); } Creatable<ResourceGroup> newGroup() { return this.creatableGroup; } @Override protected Mono<NetworkInterfaceInner> createInner() { return this .manager() .inner() .networkInterfaces() .createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner()); } @Override protected void afterCreating() { clearCachedRelatedResources(); } @Override protected void beforeCreating() { NetworkSecurityGroup networkSecurityGroup = null; if (creatableNetworkSecurityGroupKey != null) { networkSecurityGroup = this.<NetworkSecurityGroup>taskResult(creatableNetworkSecurityGroupKey); } else if (existingNetworkSecurityGroupToAssociate != null) { networkSecurityGroup = existingNetworkSecurityGroupToAssociate; } if (networkSecurityGroup != null) { this.inner().withNetworkSecurityGroup(new NetworkSecurityGroupInner().withId(networkSecurityGroup.id())); } NicIPConfigurationImpl.ensureConfigurations(this.nicIPConfigurations.values()); this.inner().withIpConfigurations(innersFromWrappers(this.nicIPConfigurations.values())); } }
class NetworkInterfaceImpl extends GroupableParentResourceWithTagsImpl< NetworkInterface, NetworkInterfaceInner, NetworkInterfaceImpl, NetworkManager> implements NetworkInterface, NetworkInterface.Definition, NetworkInterface.Update { /** the name of the network interface. */ private final String nicName; /** used to generate unique name for any dependency resources. */ protected final ResourceNamer namer; /** references to all ip configuration. */ private Map<String, NicIPConfiguration> nicIPConfigurations; /** unique key of a creatable network security group to be associated with the network interface. */ private String creatableNetworkSecurityGroupKey; /** reference to an network security group to be associated with the network interface. */ private NetworkSecurityGroup existingNetworkSecurityGroupToAssociate; /** cached related resources. */ private NetworkSecurityGroup networkSecurityGroup; NetworkInterfaceImpl(String name, NetworkInterfaceInner innerModel, final NetworkManager networkManager) { super(name, innerModel, networkManager); this.nicName = name; this.namer = this.manager().getSdkContext().getResourceNamerFactory().createResourceNamer(this.nicName); initializeChildrenFromInner(); } @Override public Mono<NetworkInterface> refreshAsync() { return super .refreshAsync() .map( networkInterface -> { NetworkInterfaceImpl impl = (NetworkInterfaceImpl) networkInterface; impl.clearCachedRelatedResources(); impl.initializeChildrenFromInner(); return impl; }); } @Override @Override protected Mono<NetworkInterfaceInner> applyTagsToInnerAsync() { return this .manager() .inner() .networkInterfaces() .updateTagsAsync(resourceGroupName(), name(), inner().getTags()); } @Override public NetworkInterfaceImpl withAcceleratedNetworking() { this.inner().withEnableAcceleratedNetworking(true); return this; } @Override public NetworkInterfaceImpl withoutAcceleratedNetworking() { this.inner().withEnableAcceleratedNetworking(false); return this; } @Override public 
NetworkInterfaceImpl withNewPrimaryNetwork(Creatable<Network> creatable) { this.primaryIPConfiguration().withNewNetwork(creatable); return this; } @Override public NetworkInterfaceImpl withNewPrimaryNetwork(String name, String addressSpaceCidr) { this.primaryIPConfiguration().withNewNetwork(name, addressSpaceCidr); return this; } @Override public NetworkInterfaceImpl withNewPrimaryNetwork(String addressSpaceCidr) { this.primaryIPConfiguration().withNewNetwork(addressSpaceCidr); return this; } @Override public NetworkInterfaceImpl withExistingPrimaryNetwork(Network network) { this.primaryIPConfiguration().withExistingNetwork(network); return this; } @Override public NetworkInterfaceImpl withNewPrimaryPublicIPAddress(Creatable<PublicIPAddress> creatable) { this.primaryIPConfiguration().withNewPublicIPAddress(creatable); return this; } @Override public NetworkInterfaceImpl withNewPrimaryPublicIPAddress() { this.primaryIPConfiguration().withNewPublicIPAddress(); return this; } @Override public NetworkInterfaceImpl withNewPrimaryPublicIPAddress(String leafDnsLabel) { this.primaryIPConfiguration().withNewPublicIPAddress(leafDnsLabel); return this; } @Override public NetworkInterfaceImpl withExistingLoadBalancerBackend(LoadBalancer loadBalancer, String backendName) { this.primaryIPConfiguration().withExistingLoadBalancerBackend(loadBalancer, backendName); return this; } @Override public NetworkInterfaceImpl withExistingLoadBalancerInboundNatRule( LoadBalancer loadBalancer, String inboundNatRuleName) { this.primaryIPConfiguration().withExistingLoadBalancerInboundNatRule(loadBalancer, inboundNatRuleName); return this; } @Override public Update withoutLoadBalancerBackends() { for (NicIPConfiguration ipConfig : this.ipConfigurations().values()) { this.updateIPConfiguration(ipConfig.name()).withoutLoadBalancerBackends(); } return this; } @Override public Update withoutLoadBalancerInboundNatRules() { for (NicIPConfiguration ipConfig : this.ipConfigurations().values()) { 
this.updateIPConfiguration(ipConfig.name()).withoutLoadBalancerInboundNatRules(); } return this; } @Override public NetworkInterfaceImpl withoutPrimaryPublicIPAddress() { this.primaryIPConfiguration().withoutPublicIPAddress(); return this; } @Override public NetworkInterfaceImpl withExistingPrimaryPublicIPAddress(PublicIPAddress publicIPAddress) { this.primaryIPConfiguration().withExistingPublicIPAddress(publicIPAddress); return this; } @Override public NetworkInterfaceImpl withPrimaryPrivateIPAddressDynamic() { this.primaryIPConfiguration().withPrivateIPAddressDynamic(); return this; } @Override public NetworkInterfaceImpl withPrimaryPrivateIPAddressStatic(String staticPrivateIPAddress) { this.primaryIPConfiguration().withPrivateIPAddressStatic(staticPrivateIPAddress); return this; } @Override public NetworkInterfaceImpl withNewNetworkSecurityGroup(Creatable<NetworkSecurityGroup> creatable) { if (this.creatableNetworkSecurityGroupKey == null) { this.creatableNetworkSecurityGroupKey = this.addDependency(creatable); } return this; } @Override public NetworkInterfaceImpl withExistingNetworkSecurityGroup(NetworkSecurityGroup networkSecurityGroup) { this.existingNetworkSecurityGroupToAssociate = networkSecurityGroup; return this; } @Override public NetworkInterfaceImpl withoutNetworkSecurityGroup() { this.inner().withNetworkSecurityGroup(null); return this; } @Override public NicIPConfigurationImpl defineSecondaryIPConfiguration(String name) { return prepareNewNicIPConfiguration(name); } @Override public NicIPConfigurationImpl updateIPConfiguration(String name) { return (NicIPConfigurationImpl) this.nicIPConfigurations.get(name); } @Override public NetworkInterfaceImpl withIPForwarding() { this.inner().withEnableIPForwarding(true); return this; } @Override public NetworkInterfaceImpl withoutIPConfiguration(String name) { this.nicIPConfigurations.remove(name); return this; } @Override public NetworkInterfaceImpl withoutIPForwarding() { 
this.inner().withEnableIPForwarding(false); return this; } @Override public NetworkInterfaceImpl withDnsServer(String ipAddress) { this.dnsServerIPs().add(ipAddress); return this; } @Override public NetworkInterfaceImpl withoutDnsServer(String ipAddress) { this.dnsServerIPs().remove(ipAddress); return this; } @Override public NetworkInterfaceImpl withAzureDnsServer() { this.dnsServerIPs().clear(); return this; } @Override public NetworkInterfaceImpl withSubnet(String name) { this.primaryIPConfiguration().withSubnet(name); return this; } @Override public NetworkInterfaceImpl withInternalDnsNameLabel(String dnsNameLabel) { this.inner().dnsSettings().withInternalDnsNameLabel(dnsNameLabel); return this; } @Override public boolean isAcceleratedNetworkingEnabled() { return Utils.toPrimitiveBoolean(this.inner().enableAcceleratedNetworking()); } @Override public String virtualMachineId() { if (this.inner().virtualMachine() != null) { return this.inner().virtualMachine().getId(); } else { return null; } } @Override public boolean isIPForwardingEnabled() { return Utils.toPrimitiveBoolean(this.inner().enableIPForwarding()); } @Override public String macAddress() { return this.inner().macAddress(); } @Override public String internalDnsNameLabel() { return (this.inner().dnsSettings() != null) ? this.inner().dnsSettings().internalDnsNameLabel() : null; } @Override public String internalDomainNameSuffix() { return (this.inner().dnsSettings() != null) ? 
this.inner().dnsSettings().internalDomainNameSuffix() : null; } @Override public List<String> appliedDnsServers() { List<String> dnsServers = new ArrayList<String>(); if (this.inner().dnsSettings() == null) { return Collections.unmodifiableList(dnsServers); } else if (this.inner().dnsSettings().appliedDnsServers() == null) { return Collections.unmodifiableList(dnsServers); } else { return Collections.unmodifiableList(this.inner().dnsSettings().appliedDnsServers()); } } @Override public String internalFqdn() { return (this.inner().dnsSettings() != null) ? this.inner().dnsSettings().internalFqdn() : null; } @Override public List<String> dnsServers() { return this.dnsServerIPs(); } @Override public String primaryPrivateIP() { return this.primaryIPConfiguration().privateIPAddress(); } @Override public IPAllocationMethod primaryPrivateIPAllocationMethod() { return this.primaryIPConfiguration().privateIPAllocationMethod(); } @Override public Map<String, NicIPConfiguration> ipConfigurations() { return Collections.unmodifiableMap(this.nicIPConfigurations); } @Override public String networkSecurityGroupId() { return (this.inner().networkSecurityGroup() != null) ? 
this.inner().networkSecurityGroup().getId() : null; } @Override public NetworkSecurityGroup getNetworkSecurityGroup() { if (this.networkSecurityGroup == null && this.networkSecurityGroupId() != null) { String id = this.networkSecurityGroupId(); this.networkSecurityGroup = super .myManager .networkSecurityGroups() .getByResourceGroup(ResourceUtils.groupFromResourceId(id), ResourceUtils.nameFromResourceId(id)); } return this.networkSecurityGroup; } /** @return the primary IP configuration of the network interface */ @Override public NicIPConfigurationImpl primaryIPConfiguration() { NicIPConfigurationImpl primaryIPConfig = null; if (this.nicIPConfigurations.size() == 0) { primaryIPConfig = prepareNewNicIPConfiguration("primary"); primaryIPConfig.inner().withPrimary(true); withIPConfiguration(primaryIPConfig); } else if (this.nicIPConfigurations.size() == 1) { primaryIPConfig = (NicIPConfigurationImpl) this.nicIPConfigurations.values().iterator().next(); } else { for (NicIPConfiguration ipConfig : this.nicIPConfigurations.values()) { if (ipConfig.isPrimary()) { primaryIPConfig = (NicIPConfigurationImpl) ipConfig; break; } } } return primaryIPConfig; } /** @return the list of DNS server IPs from the DNS settings */ private List<String> dnsServerIPs() { List<String> dnsServers = new ArrayList<String>(); if (this.inner().dnsSettings() == null) { return dnsServers; } else if (this.inner().dnsSettings().dnsServers() == null) { return dnsServers; } else { return this.inner().dnsSettings().dnsServers(); } } @Override protected void initializeChildrenFromInner() { this.nicIPConfigurations = new TreeMap<>(); List<NetworkInterfaceIPConfigurationInner> inners = this.inner().ipConfigurations(); if (inners != null) { for (NetworkInterfaceIPConfigurationInner inner : inners) { NicIPConfigurationImpl nicIPConfiguration = new NicIPConfigurationImpl(inner, this, super.myManager, false); this.nicIPConfigurations.put(nicIPConfiguration.name(), nicIPConfiguration); } } } /** * Gets a new 
IP configuration child resource {@link NicIPConfiguration} wrapping {@link * NetworkInterfaceIPConfigurationInner}. * * @param name the name for the new ip configuration * @return {@link NicIPConfiguration} */ private NicIPConfigurationImpl prepareNewNicIPConfiguration(String name) { NicIPConfigurationImpl nicIPConfiguration = NicIPConfigurationImpl.prepareNicIPConfiguration(name, this, super.myManager); return nicIPConfiguration; } private void clearCachedRelatedResources() { this.networkSecurityGroup = null; } NetworkInterfaceImpl withIPConfiguration(NicIPConfigurationImpl nicIPConfiguration) { this.nicIPConfigurations.put(nicIPConfiguration.name(), nicIPConfiguration); return this; } void addToCreatableDependencies(Creatable<? extends Resource> creatableResource) { this.addDependency(creatableResource); } Resource createdDependencyResource(String key) { return this.<Resource>taskResult(key); } Creatable<ResourceGroup> newGroup() { return this.creatableGroup; } @Override protected Mono<NetworkInterfaceInner> createInner() { return this .manager() .inner() .networkInterfaces() .createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner()); } @Override protected void afterCreating() { clearCachedRelatedResources(); } @Override protected void beforeCreating() { NetworkSecurityGroup networkSecurityGroup = null; if (creatableNetworkSecurityGroupKey != null) { networkSecurityGroup = this.<NetworkSecurityGroup>taskResult(creatableNetworkSecurityGroupKey); } else if (existingNetworkSecurityGroupToAssociate != null) { networkSecurityGroup = existingNetworkSecurityGroupToAssociate; } if (networkSecurityGroup != null) { this.inner().withNetworkSecurityGroup(new NetworkSecurityGroupInner().withId(networkSecurityGroup.id())); } NicIPConfigurationImpl.ensureConfigurations(this.nicIPConfigurations.values()); this.inner().withIpConfigurations(innersFromWrappers(this.nicIPConfigurations.values())); } }
To make sure the MV's plan is selected forcibly, skip the upper-bound cost check for any group expression that contains an MV rewrite plan.
// Cost this physical group expression under the required properties (Cascades
// "enforce and cost" step). The cursor fields (curPropertyPairIndex,
// curChildIndex, prevChildIndex, curTotalCost) let the task suspend itself to
// optimize a child group first and later resume from the same position.
public void execute() {
    // A group expression can be marked unused in the memo; nothing to cost then.
    if (groupExpression.isUnused()) {
        return;
    }
    // CTE-related operators are only legal when the required property carries
    // the matching CTE ids; bail out on an invalid combination.
    if (!checkCTEPropertyValid(groupExpression, context.getRequiredProperty())) {
        return;
    }
    initRequiredProperties();
    SessionVariable sessionVariable = context.getOptimizerContext().getSessionVariable();
    // When MV force-rewrite is enabled and this expression was produced by an
    // MV rewrite rule, skip the feasibility/cost pruning in the child loop
    // below so the MV plan cannot be pruned away by cost.
    boolean isMVPlanForceRewrite = sessionVariable.isEnableMaterializedViewForceRewrite() &&
            MvUtils.hasAppliedMVRules(groupExpression);
    // Try every candidate combination of required child properties.
    for (; curPropertyPairIndex < childrenRequiredPropertiesList.size(); curPropertyPairIndex++) {
        List<PhysicalPropertySet> childrenRequiredProperties =
                childrenRequiredPropertiesList.get(curPropertyPairIndex);
        // First visit of this combination (not a resume): charge the
        // expression's own local cost exactly once.
        if (curChildIndex == 0 && prevChildIndex == -1) {
            localCost = CostModel.calculateCost(groupExpression);
            curTotalCost += localCost;
        }
        for (; curChildIndex < groupExpression.getInputs().size(); curChildIndex++) {
            PhysicalPropertySet childRequiredProperty = childrenRequiredProperties.get(curChildIndex);
            Group childGroup = groupExpression.getInputs().get(curChildIndex);
            GroupExpression childBestExpr = childGroup.getBestExpression(childRequiredProperty);
            // We already optimized this child (resume path) and it still has no
            // best plan for this property: the combination is infeasible.
            if (childBestExpr == null && prevChildIndex >= curChildIndex) {
                break;
            }
            if (childBestExpr == null) {
                // Suspend this task (a clone is re-pushed inside
                // optimizeChildGroup) and optimize the child group first.
                prevChildIndex = curChildIndex;
                optimizeChildGroup(childRequiredProperty, childGroup);
                return;
            }
            childrenBestExprList.add(childBestExpr);
            PhysicalPropertySet childOutputProperty = childBestExpr.getOutputProperty(childRequiredProperty);
            childrenOutputProperties.add(childOutputProperty);
            // Record the output property the child actually delivers.
            childrenRequiredProperties.set(curChildIndex, childOutputProperty);
            if (isMVPlanForceRewrite) {
                // Forced MV plan: bypass the one-stage-agg check, the broadcast
                // row-count check, and the upper-bound cost pruning.
                continue;
            }
            if (!canGenerateOneStageAgg(childBestExpr)) {
                break;
            }
            if (!checkBroadcastRowCountLimit(childRequiredProperty, childBestExpr)) {
                break;
            }
            curTotalCost += childBestExpr.getCost(childRequiredProperty);
            // Prune: already costlier than the best complete plan seen so far.
            if (curTotalCost > context.getUpperBoundCost()) {
                break;
            }
        }
        // All children were costed successfully for this combination.
        if (curChildIndex == groupExpression.getInputs().size()) {
            ChildOutputPropertyGuarantor childOutputPropertyGuarantor = new ChildOutputPropertyGuarantor(context,
                    groupExpression, context.getRequiredProperty(), childrenBestExprList,
                    childrenRequiredProperties, childrenOutputProperties, curTotalCost);
            // May add enforcers between parent and children; returns the new total cost.
            curTotalCost = childOutputPropertyGuarantor.enforceLegalChildOutputProperty();
            if (curTotalCost > context.getUpperBoundCost()) {
                break;
            }
            if (!computeCurrentGroupStatistics()) {
                return;
            }
            OutputPropertyDeriver outputPropertyDeriver = new OutputPropertyDeriver(groupExpression,
                    context.getRequiredProperty(), childrenOutputProperties);
            PhysicalPropertySet outputProperty = outputPropertyDeriver.getOutputProperty();
            recordCostsAndEnforce(outputProperty, childrenRequiredProperties);
        }
        // Reset per-combination cursors before trying the next property pair.
        prevChildIndex = -1;
        curChildIndex = 0;
        curTotalCost = 0;
        childrenBestExprList.clear();
        childrenOutputProperties.clear();
    }
}
if (isMVPlanForceRewrite) {
// Cost this physical group expression under the required properties (Cascades
// "enforce and cost" step). The cursor fields (curPropertyPairIndex,
// curChildIndex, prevChildIndex, curTotalCost) let the task suspend itself to
// optimize a child group first and later resume from the same position.
public void execute() {
    // A group expression can be marked unused in the memo; nothing to cost then.
    if (groupExpression.isUnused()) {
        return;
    }
    // CTE-related operators are only legal when the required property carries
    // the matching CTE ids; bail out on an invalid combination.
    if (!checkCTEPropertyValid(groupExpression, context.getRequiredProperty())) {
        return;
    }
    // MV force-rewrite: in a group that has an MV-rewritten alternative, only
    // cost expressions produced by MV rules, and pin the group's best
    // expression to the MV plan before costing.
    if (context.getOptimizerContext().getSessionVariable().isEnableMaterializedViewForceRewrite() &&
            groupExpression.getGroup().hasMVGroupExpression()) {
        if (!groupExpression.hasAppliedMVRules()) {
            return;
        } else {
            groupExpression.getGroup().forceChooseMVExpression(context);
        }
    }
    initRequiredProperties();
    // Try every candidate combination of required child properties.
    for (; curPropertyPairIndex < childrenRequiredPropertiesList.size(); curPropertyPairIndex++) {
        List<PhysicalPropertySet> childrenRequiredProperties =
                childrenRequiredPropertiesList.get(curPropertyPairIndex);
        // First visit of this combination (not a resume): charge the
        // expression's own local cost exactly once.
        if (curChildIndex == 0 && prevChildIndex == -1) {
            localCost = CostModel.calculateCost(groupExpression);
            curTotalCost += localCost;
        }
        for (; curChildIndex < groupExpression.getInputs().size(); curChildIndex++) {
            PhysicalPropertySet childRequiredProperty = childrenRequiredProperties.get(curChildIndex);
            Group childGroup = groupExpression.getInputs().get(curChildIndex);
            GroupExpression childBestExpr = childGroup.getBestExpression(childRequiredProperty);
            // We already optimized this child (resume path) and it still has no
            // best plan for this property: the combination is infeasible.
            if (childBestExpr == null && prevChildIndex >= curChildIndex) {
                break;
            }
            if (childBestExpr == null) {
                // Suspend this task (a clone is re-pushed inside
                // optimizeChildGroup) and optimize the child group first.
                prevChildIndex = curChildIndex;
                optimizeChildGroup(childRequiredProperty, childGroup);
                return;
            }
            childrenBestExprList.add(childBestExpr);
            PhysicalPropertySet childOutputProperty = childBestExpr.getOutputProperty(childRequiredProperty);
            childrenOutputProperties.add(childOutputProperty);
            // Record the output property the child actually delivers.
            childrenRequiredProperties.set(curChildIndex, childOutputProperty);
            if (!canGenerateOneStageAgg(childBestExpr)) {
                break;
            }
            if (!checkBroadcastRowCountLimit(childRequiredProperty, childBestExpr)) {
                break;
            }
            curTotalCost += childBestExpr.getCost(childRequiredProperty);
            // Prune: already costlier than the best complete plan seen so far.
            if (curTotalCost > context.getUpperBoundCost()) {
                break;
            }
        }
        // All children were costed successfully for this combination.
        if (curChildIndex == groupExpression.getInputs().size()) {
            ChildOutputPropertyGuarantor childOutputPropertyGuarantor = new ChildOutputPropertyGuarantor(context,
                    groupExpression, context.getRequiredProperty(), childrenBestExprList,
                    childrenRequiredProperties, childrenOutputProperties, curTotalCost);
            // May add enforcers between parent and children; returns the new total cost.
            curTotalCost = childOutputPropertyGuarantor.enforceLegalChildOutputProperty();
            if (curTotalCost > context.getUpperBoundCost()) {
                break;
            }
            if (!computeCurrentGroupStatistics()) {
                return;
            }
            OutputPropertyDeriver outputPropertyDeriver = new OutputPropertyDeriver(groupExpression,
                    context.getRequiredProperty(), childrenOutputProperties);
            PhysicalPropertySet outputProperty = outputPropertyDeriver.getOutputProperty();
            recordCostsAndEnforce(outputProperty, childrenRequiredProperties);
        }
        // Reset per-combination cursors before trying the next property pair.
        prevChildIndex = -1;
        curChildIndex = 0;
        curTotalCost = 0;
        childrenBestExprList.clear();
        childrenOutputProperties.clear();
    }
}
// Optimizer task that costs a physical group expression under required
// physical properties and records the best expression per property in its
// group. Implements Cloneable so the task can snapshot its cursor state,
// suspend itself while a child group is optimized, and resume later.
class EnforceAndCostTask extends OptimizerTask implements Cloneable {
    private final GroupExpression groupExpression;
    // One entry per candidate combination of required child properties.
    private List<List<PhysicalPropertySet>> childrenRequiredPropertiesList;
    // Accumulated cost of the plan rooted at this expression for the current combination.
    private double curTotalCost;
    // Cost of this expression alone (excluding children).
    private double localCost;
    // Cursor into the children of the current combination; -1 means "not initialized yet".
    private int curChildIndex = -1;
    // Index of the child we suspended on last time; -1 when not resuming.
    private int prevChildIndex = -1;
    // Cursor into childrenRequiredPropertiesList.
    private int curPropertyPairIndex = 0;
    private final List<GroupExpression> childrenBestExprList = Lists.newArrayList();
    private final List<PhysicalPropertySet> childrenOutputProperties = Lists.newArrayList();
    private static final Logger LOG = LogManager.getLogger(EnforceAndCostTask.class);

    EnforceAndCostTask(TaskContext context, GroupExpression expression) {
        super(context);
        this.groupExpression = expression;
    }

    // Shallow copy used to re-push this task with its cursor state preserved.
    @Override
    public Object clone() {
        EnforceAndCostTask task = null;
        try {
            task = (EnforceAndCostTask) super.clone();
        } catch (CloneNotSupportedException ignored) {
            // Cannot happen: this class implements Cloneable.
        }
        return task;
    }

    @Override
    public String toString() {
        return "EnforceAndCostTask for groupExpression " + groupExpression +
                "\n curChildIndex " + curChildIndex +
                "\n prevChildIndex " + prevChildIndex +
                "\n curTotalCost " + curTotalCost;
    }

    // NOTE(review): this @Override sits on a private method and would not
    // compile; it looks like a method (likely execute()) was elided between
    // toString() and this point when the snippet was extracted — confirm
    // against the original file.
    @Override
    // Checks that the required property's CTE ids are consistent with the
    // CTE operator being costed (anchor/produce need all used CTEs present;
    // consume needs its own id present; no-CTE needs its id absent).
    private boolean checkCTEPropertyValid(GroupExpression groupExpression, PhysicalPropertySet requiredPropertySet) {
        OperatorType operatorType = groupExpression.getOp().getOpType();
        CTEProperty property = requiredPropertySet.getCteProperty();
        CTEProperty usedCTEs;
        switch (operatorType) {
            case PHYSICAL_CTE_ANCHOR:
            case PHYSICAL_CTE_PRODUCE:
                usedCTEs = groupExpression.getGroup().getLogicalProperty().getUsedCTEs();
                return property.getCteIds().containsAll(usedCTEs.getCteIds());
            case PHYSICAL_CTE_CONSUME:
                PhysicalCTEConsumeOperator consumeOperator = (PhysicalCTEConsumeOperator) groupExpression.getOp();
                return property.getCteIds().contains(consumeOperator.getCteId());
            case PHYSICAL_NO_CTE:
                PhysicalNoCTEOperator noCTEOperator = (PhysicalNoCTEOperator) groupExpression.getOp();
                return !property.getCteIds().contains(noCTEOperator.getCteId());
            default:
                return true;
        }
    }
private void initRequiredProperties() { if (curChildIndex != -1) { return; } localCost = 0; curTotalCost = 0; RequiredPropertyDeriver requiredPropertyDeriver = new RequiredPropertyDeriver(context); childrenRequiredPropertiesList = requiredPropertyDeriver.getRequiredProps(groupExpression); curChildIndex = 0; } private void optimizeChildGroup(PhysicalPropertySet inputProperty, Group childGroup) { pushTask((EnforceAndCostTask) clone()); double newUpperBound = context.getUpperBoundCost() - curTotalCost; TaskContext taskContext = new TaskContext(context.getOptimizerContext(), inputProperty, context.getRequiredColumns(), newUpperBound); pushTask(new OptimizeGroupTask(taskContext, childGroup)); } private boolean checkBroadcastRowCountLimit(PhysicalPropertySet inputProperty, GroupExpression childBestExpr) { if (!inputProperty.getDistributionProperty().isBroadcast()) { return true; } if (!OperatorType.PHYSICAL_HASH_JOIN.equals(groupExpression.getOp().getOpType())) { return true; } PhysicalJoinOperator node = (PhysicalJoinOperator) groupExpression.getOp(); double childCost = childBestExpr.getCost(inputProperty); if (JoinOperator.HINT_BROADCAST.equals(node.getJoinHint()) && childCost == Double.POSITIVE_INFINITY) { List<PhysicalPropertySet> childInputProperties = childBestExpr.getInputProperties(inputProperty); childBestExpr.updatePropertyWithCost(inputProperty, childInputProperties, 0); } ColumnRefSet leftChildColumns = groupExpression.getChildOutputColumns(0); ColumnRefSet rightChildColumns = groupExpression.getChildOutputColumns(1); List<BinaryPredicateOperator> equalOnPredicate = JoinHelper .getEqualsPredicate(leftChildColumns, rightChildColumns, Utils.extractConjuncts(node.getOnPredicate())); if (JoinHelper.onlyBroadcast(node.getJoinType(), equalOnPredicate, node.getJoinHint())) { return true; } ConnectContext ctx = ConnectContext.get(); SessionVariable sv = ConnectContext.get().getSessionVariable(); int beNum = Math.max(1, ctx.getAliveBackendNumber()); Statistics 
leftChildStats = groupExpression.getInputs().get(curChildIndex - 1).getStatistics(); Statistics rightChildStats = groupExpression.getInputs().get(curChildIndex).getStatistics(); if (leftChildStats == null || rightChildStats == null) { return false; } double leftOutputSize = leftChildStats.getOutputSize(groupExpression.getChildOutputColumns(curChildIndex - 1)); double rightOutputSize = rightChildStats.getOutputSize(groupExpression.getChildOutputColumns(curChildIndex)); if (leftOutputSize < rightOutputSize * beNum * sv.getBroadcastRightTableScaleFactor() && rightChildStats.getOutputRowCount() > sv.getBroadcastRowCountLimit()) { return false; } return true; } private void setSatisfiedPropertyWithCost(PhysicalPropertySet outputProperty, List<PhysicalPropertySet> childrenOutputProperties) { setPropertyWithCost(groupExpression, outputProperty, childrenOutputProperties); if (outputProperty.getCteProperty().isEmpty()) { setPropertyWithCost(groupExpression, outputProperty, PhysicalPropertySet.EMPTY, childrenOutputProperties); } } private void recordCostsAndEnforce(PhysicalPropertySet outputProperty, List<PhysicalPropertySet> childrenOutputProperties) { curTotalCost -= localCost; localCost = CostModel.calculateCostWithChildrenOutProperty(groupExpression, childrenOutputProperties); curTotalCost += localCost; setSatisfiedPropertyWithCost(outputProperty, childrenOutputProperties); PhysicalPropertySet requiredProperty = context.getRequiredProperty(); recordPlanEnumInfo(groupExpression, outputProperty, childrenOutputProperties); if (!outputProperty.isSatisfy(requiredProperty)) { PhysicalPropertySet enforcedProperty = enforceProperty(outputProperty, requiredProperty); if (!enforcedProperty.equals(requiredProperty)) { setPropertyWithCost(groupExpression.getGroup().getBestExpression(enforcedProperty), enforcedProperty, requiredProperty, Lists.newArrayList(outputProperty)); } } else { if (!outputProperty.equals(requiredProperty)) { setPropertyWithCost(groupExpression, outputProperty, 
requiredProperty, childrenOutputProperties); } } if (curTotalCost < context.getUpperBoundCost()) { LOG.debug("Update upperBoundCost: prev={} curr={}", context.getUpperBoundCost(), curTotalCost); context.setUpperBoundCost(curTotalCost); } } private boolean canGenerateOneStageAgg(GroupExpression childBestExpr) { if (!OperatorType.PHYSICAL_HASH_AGG.equals(groupExpression.getOp().getOpType())) { return true; } int aggStage = ConnectContext.get().getSessionVariable().getNewPlannerAggStage(); if (aggStage == 1) { return true; } if (childBestExpr.getOp() instanceof PhysicalDistributionOperator) { PhysicalDistributionOperator distributionOperator = (PhysicalDistributionOperator) childBestExpr.getOp(); if (childBestExpr.getOp().hasLimit() && distributionOperator.getDistributionSpec().getType() .equals(DistributionSpec.DistributionType.GATHER)) { return true; } } PhysicalHashAggregateOperator aggregate = (PhysicalHashAggregateOperator) groupExpression.getOp(); List<CallOperator> distinctAggCallOperator = aggregate.getAggregations().values().stream() .filter(CallOperator::isDistinct).collect(Collectors.toList()); if (aggregate.getType().isGlobal() && !aggregate.isSplit() && childBestExpr.getOp() instanceof PhysicalDistributionOperator) { if (aggregate.getDistinctColumnDataSkew() != null) { return true; } if (groupExpression.getGroup().getStatistics().getColumnStatistics().values().stream() .anyMatch(ColumnStatistic::isUnknown) || childBestExpr.getGroup().getStatistics().isTableRowCountMayInaccurate()) { return false; } if (distinctAggCallOperator.size() > 0) { return false; } return aggregate.getGroupBys().size() <= 1; } return true; } private boolean computeCurrentGroupStatistics() { if (groupExpression.getInputs().stream().anyMatch(group -> group.getStatistics() == null)) { return false; } Preconditions.checkNotNull(groupExpression.getGroup().getStatistics()); return true; } private void setPropertyWithCost(GroupExpression groupExpression, PhysicalPropertySet 
outputProperty, PhysicalPropertySet requiredProperty, List<PhysicalPropertySet> childrenOutputProperties) { if (groupExpression.updatePropertyWithCost(requiredProperty, childrenOutputProperties, curTotalCost)) { groupExpression.setOutputPropertySatisfyRequiredProperty(outputProperty, requiredProperty); } this.groupExpression.getGroup().setBestExpression(groupExpression, curTotalCost, requiredProperty); } private void recordPlanEnumInfo(GroupExpression groupExpression, PhysicalPropertySet outputProperty, List<PhysicalPropertySet> childrenOutputProperties) { if (ConnectContext.get().getSessionVariable().isSetUseNthExecPlan()) { groupExpression.addValidOutputPropertyGroup(outputProperty, childrenOutputProperties); groupExpression.getGroup().addSatisfyOutputPropertyGroupExpression(outputProperty, groupExpression); } } private void setPropertyWithCost(GroupExpression groupExpression, PhysicalPropertySet requiredProperty, List<PhysicalPropertySet> childrenOutputProperties) { setPropertyWithCost(groupExpression, requiredProperty, requiredProperty, childrenOutputProperties); } private PhysicalPropertySet enforceProperty(PhysicalPropertySet outputProperty, PhysicalPropertySet requiredProperty) { boolean satisfyOrderProperty = outputProperty.getSortProperty().isSatisfy(requiredProperty.getSortProperty()); boolean satisfyDistributionProperty = outputProperty.getDistributionProperty().isSatisfy(requiredProperty.getDistributionProperty()); PhysicalPropertySet enforcedProperty = null; if (!satisfyDistributionProperty && satisfyOrderProperty) { if (requiredProperty.getSortProperty().isEmpty()) { enforcedProperty = enforceDistribute(outputProperty); } else { /* * The sorting attribute does not make sense when the sort property is not empty, * because after the data is redistributed, the original order requirements cannot be guaranteed. * So we need to enforce "SortNode" here * * Because we build a parent-child relationship based on property. 
* So here we hack to eliminate the original property to prevent an endless loop * eg: [order by v1, gather] -> [order by v1, shuffle] -> [order by v1, shuffle] may endless loop, * because repartition require sort again */ PhysicalPropertySet newProperty = new PhysicalPropertySet(EmptyDistributionProperty.INSTANCE, EmptySortProperty.INSTANCE, outputProperty.getCteProperty()); groupExpression.getGroup().replaceBestExpressionProperty(outputProperty, newProperty, groupExpression.getCost(outputProperty)); enforcedProperty = enforceSortAndDistribute(newProperty, requiredProperty); } } else if (satisfyDistributionProperty && !satisfyOrderProperty) { enforcedProperty = enforceSort(outputProperty); } else if (!satisfyDistributionProperty) { enforcedProperty = enforceSortAndDistribute(outputProperty, requiredProperty); } return enforcedProperty; } private PhysicalPropertySet enforceDistribute(PhysicalPropertySet oldOutputProperty) { PhysicalPropertySet requiredPropertySet = oldOutputProperty.copy(); requiredPropertySet.setDistributionProperty(context.getRequiredProperty() .getDistributionProperty().getNullStrictProperty()); GroupExpression enforcer = requiredPropertySet.getDistributionProperty() .appendEnforcers(groupExpression.getGroup()); PhysicalPropertySet newOutputProperty = updateCostAndOutputPropertySet(enforcer, oldOutputProperty, requiredPropertySet); recordPlanEnumInfo(enforcer, newOutputProperty, Lists.newArrayList(oldOutputProperty)); return newOutputProperty; } private PhysicalPropertySet enforceSort(PhysicalPropertySet oldOutputProperty) { PhysicalPropertySet newOutputProperty = oldOutputProperty.copy(); newOutputProperty.setSortProperty(context.getRequiredProperty().getSortProperty()); GroupExpression enforcer = context.getRequiredProperty().getSortProperty().appendEnforcers(groupExpression.getGroup()); updateCostWithEnforcer(enforcer, oldOutputProperty, newOutputProperty); recordPlanEnumInfo(enforcer, newOutputProperty, Lists.newArrayList(oldOutputProperty)); 
return newOutputProperty; } private PhysicalPropertySet enforceSortAndDistribute(PhysicalPropertySet outputProperty, PhysicalPropertySet requiredProperty) { PhysicalPropertySet enforcedProperty; if (requiredProperty.getDistributionProperty().getSpec() .equals(DistributionSpec.createGatherDistributionSpec())) { enforcedProperty = enforceSort(outputProperty); enforcedProperty = enforceDistribute(enforcedProperty); } else { enforcedProperty = enforceDistribute(outputProperty); enforcedProperty = enforceSort(enforcedProperty); } return enforcedProperty; } private void updateCostWithEnforcer(GroupExpression enforcer, PhysicalPropertySet oldOutputProperty, PhysicalPropertySet newOutputProperty) { context.getOptimizerContext().getMemo(). insertEnforceExpression(enforcer, groupExpression.getGroup()); if (enforcer.updatePropertyWithCost(newOutputProperty, Lists.newArrayList(oldOutputProperty), curTotalCost)) { enforcer.setOutputPropertySatisfyRequiredProperty(newOutputProperty, newOutputProperty); } groupExpression.getGroup().setBestExpression(enforcer, curTotalCost, newOutputProperty); } private PhysicalPropertySet updateCostAndOutputPropertySet(GroupExpression enforcer, PhysicalPropertySet oldOutputProperty, PhysicalPropertySet requiredPropertySet) { context.getOptimizerContext().getMemo().insertEnforceExpression(enforcer, groupExpression.getGroup()); curTotalCost += CostModel.calculateCost(enforcer); PhysicalPropertySet newOutputProperty = groupExpression.getGroup().updateOutputPropertySet(enforcer, curTotalCost, requiredPropertySet); if (enforcer.updatePropertyWithCost(newOutputProperty, Lists.newArrayList(oldOutputProperty), curTotalCost)) { enforcer.setOutputPropertySatisfyRequiredProperty(newOutputProperty, newOutputProperty); } return newOutputProperty; } }
/**
 * Optimizer task that costs a physical group expression against the required
 * physical properties, drives optimization of its child groups, and inserts
 * enforcer operators (sort / distribution exchange) when the properties the
 * expression can deliver do not satisfy what the parent requires.
 *
 * NOTE(review): this excerpt appears truncated — a dangling {@code @Override}
 * annotation below suggests an overridden method (likely {@code execute()})
 * was elided between {@code toString()} and {@code checkCTEPropertyValid};
 * confirm against the original source file.
 */
class EnforceAndCostTask extends OptimizerTask implements Cloneable {
    private final GroupExpression groupExpression;
    // Candidate child-property requirements; one inner list per alternative.
    private List<List<PhysicalPropertySet>> childrenRequiredPropertiesList;
    // Running total cost of this expression plus already-costed children.
    private double curTotalCost;
    // Cost contributed by this group expression alone (children excluded).
    private double localCost;
    // Index of the child currently being optimized; -1 means "not initialized".
    private int curChildIndex = -1;
    private int prevChildIndex = -1;
    private int curPropertyPairIndex = 0;
    // Best expression / output property chosen per child so far.
    // (Presumably consumed by the elided execute() logic — confirm.)
    private final List<GroupExpression> childrenBestExprList = Lists.newArrayList();
    private final List<PhysicalPropertySet> childrenOutputProperties = Lists.newArrayList();

    private static final Logger LOG = LogManager.getLogger(EnforceAndCostTask.class);

    EnforceAndCostTask(TaskContext context, GroupExpression expression) {
        super(context);
        this.groupExpression = expression;
    }

    /**
     * Shallow clone used to re-push this task while a child group is optimized.
     * CloneNotSupportedException cannot occur (the class implements Cloneable),
     * so it is deliberately swallowed.
     */
    @Override
    public Object clone() {
        EnforceAndCostTask task = null;
        try {
            task = (EnforceAndCostTask) super.clone();
        } catch (CloneNotSupportedException ignored) {
        }
        return task;
    }

    @Override
    public String toString() {
        return "EnforceAndCostTask for groupExpression " + groupExpression +
                "\n curChildIndex " + curChildIndex +
                "\n prevChildIndex " + prevChildIndex +
                "\n curTotalCost " + curTotalCost;
    }

    // NOTE(review): dangling annotation — the method it annotated is missing
    // from this excerpt; @Override is not valid on the private method below.
    @Override

    /**
     * Validates the CTE component of {@code requiredPropertySet} against this
     * operator: anchors/producers must have every CTE they use covered by the
     * property, a consume node must itself be covered, and a no-CTE node must
     * NOT appear in the property. All other operators are always valid.
     */
    private boolean checkCTEPropertyValid(GroupExpression groupExpression, PhysicalPropertySet requiredPropertySet) {
        OperatorType operatorType = groupExpression.getOp().getOpType();
        CTEProperty property = requiredPropertySet.getCteProperty();
        CTEProperty usedCTEs;
        switch (operatorType) {
            case PHYSICAL_CTE_ANCHOR:
            case PHYSICAL_CTE_PRODUCE:
                usedCTEs = groupExpression.getGroup().getLogicalProperty().getUsedCTEs();
                return property.getCteIds().containsAll(usedCTEs.getCteIds());
            case PHYSICAL_CTE_CONSUME:
                PhysicalCTEConsumeOperator consumeOperator = (PhysicalCTEConsumeOperator) groupExpression.getOp();
                return property.getCteIds().contains(consumeOperator.getCteId());
            case PHYSICAL_NO_CTE:
                PhysicalNoCTEOperator noCTEOperator = (PhysicalNoCTEOperator) groupExpression.getOp();
                return !property.getCteIds().contains(noCTEOperator.getCteId());
            default:
                return true;
        }
    }

    /**
     * Lazily derives the children's required property alternatives and resets
     * the per-invocation cost accumulators. Idempotent: a second call (after
     * curChildIndex has advanced past -1) is a no-op.
     */
    private void initRequiredProperties() {
        if (curChildIndex != -1) {
            return;
        }
        localCost = 0;
        curTotalCost = 0;
        RequiredPropertyDeriver requiredPropertyDeriver = new RequiredPropertyDeriver(context);
        childrenRequiredPropertiesList = requiredPropertyDeriver.getRequiredProps(groupExpression);
        curChildIndex = 0;
    }

    /**
     * Re-pushes a clone of this task, then pushes an OptimizeGroupTask for the
     * child group with the remaining cost budget as the new upper bound; the
     * clone resumes once the child group has been optimized.
     */
    private void optimizeChildGroup(PhysicalPropertySet inputProperty, Group childGroup) {
        pushTask((EnforceAndCostTask) clone());
        double newUpperBound = context.getUpperBoundCost() - curTotalCost;
        TaskContext taskContext = new TaskContext(context.getOptimizerContext(),
                inputProperty, context.getRequiredColumns(), newUpperBound);
        pushTask(new OptimizeGroupTask(taskContext, childGroup));
    }

    /**
     * Returns false if a broadcast join should be rejected because the
     * broadcast (right) side is too large relative to the shuffle alternative
     * and exceeds the session's broadcast row-count limit. Hinted broadcast
     * joins and joins that can ONLY broadcast always pass.
     */
    private boolean checkBroadcastRowCountLimit(PhysicalPropertySet inputProperty, GroupExpression childBestExpr) {
        if (!inputProperty.getDistributionProperty().isBroadcast()) {
            return true;
        }
        if (!OperatorType.PHYSICAL_HASH_JOIN.equals(groupExpression.getOp().getOpType())) {
            return true;
        }
        PhysicalJoinOperator node = (PhysicalJoinOperator) groupExpression.getOp();
        double childCost = childBestExpr.getCost(inputProperty);
        // A user-hinted broadcast must win even if its cost was previously
        // marked infeasible, so reset the child's cost to zero.
        if (JoinOperator.HINT_BROADCAST.equals(node.getJoinHint()) && childCost == Double.POSITIVE_INFINITY) {
            List<PhysicalPropertySet> childInputProperties = childBestExpr.getInputProperties(inputProperty);
            childBestExpr.updatePropertyWithCost(inputProperty, childInputProperties, 0);
        }
        ColumnRefSet leftChildColumns = groupExpression.getChildOutputColumns(0);
        ColumnRefSet rightChildColumns = groupExpression.getChildOutputColumns(1);
        List<BinaryPredicateOperator> equalOnPredicate = JoinHelper
                .getEqualsPredicate(leftChildColumns, rightChildColumns, Utils.extractConjuncts(node.getOnPredicate()));
        // Without an equi-join predicate (or with a forcing hint) broadcast is
        // the only legal distribution, so never reject it.
        if (JoinHelper.onlyBroadcast(node.getJoinType(), equalOnPredicate, node.getJoinHint())) {
            return true;
        }
        ConnectContext ctx = ConnectContext.get();
        SessionVariable sv = ConnectContext.get().getSessionVariable();
        int beNum = Math.max(1, ctx.getAliveBackendNumber());
        Statistics leftChildStats = groupExpression.getInputs().get(curChildIndex - 1).getStatistics();
        Statistics rightChildStats = groupExpression.getInputs().get(curChildIndex).getStatistics();
        if (leftChildStats == null || rightChildStats == null) {
            return false;
        }
        double leftOutputSize = leftChildStats.getOutputSize(groupExpression.getChildOutputColumns(curChildIndex - 1));
        double rightOutputSize = rightChildStats.getOutputSize(groupExpression.getChildOutputColumns(curChildIndex));
        // Reject broadcast when replicating the right side to every backend is
        // more expensive than shuffling AND it exceeds the row-count limit.
        if (leftOutputSize < rightOutputSize * beNum * sv.getBroadcastRightTableScaleFactor()
                && rightChildStats.getOutputRowCount() > sv.getBroadcastRowCountLimit()) {
            return false;
        }
        return true;
    }

    /**
     * Records the given output property as satisfied; a CTE-free property is
     * additionally recorded against the EMPTY property set.
     */
    private void setSatisfiedPropertyWithCost(PhysicalPropertySet outputProperty,
                                              List<PhysicalPropertySet> childrenOutputProperties) {
        setPropertyWithCost(groupExpression, outputProperty, childrenOutputProperties);
        if (outputProperty.getCteProperty().isEmpty()) {
            setPropertyWithCost(groupExpression, outputProperty, PhysicalPropertySet.EMPTY, childrenOutputProperties);
        }
    }

    /**
     * Re-computes this expression's local cost from the children's final output
     * properties, records the property/cost pair, enforces missing sort or
     * distribution properties, and tightens the search upper bound if this
     * plan is the cheapest seen so far.
     */
    private void recordCostsAndEnforce(PhysicalPropertySet outputProperty,
                                       List<PhysicalPropertySet> childrenOutputProperties) {
        // Replace the previously-added local cost with the recomputed one.
        curTotalCost -= localCost;
        localCost = CostModel.calculateCostWithChildrenOutProperty(groupExpression, childrenOutputProperties);
        curTotalCost += localCost;
        setSatisfiedPropertyWithCost(outputProperty, childrenOutputProperties);
        PhysicalPropertySet requiredProperty = context.getRequiredProperty();
        recordPlanEnumInfo(groupExpression, outputProperty, childrenOutputProperties);
        if (!outputProperty.isSatisfy(requiredProperty)) {
            PhysicalPropertySet enforcedProperty = enforceProperty(outputProperty, requiredProperty);
            // Record the enforcer chain against the required property unless the
            // enforcement already produced exactly the required property.
            if (!enforcedProperty.equals(requiredProperty)) {
                setPropertyWithCost(groupExpression.getGroup().getBestExpression(enforcedProperty),
                        enforcedProperty, requiredProperty, Lists.newArrayList(outputProperty));
            }
        } else {
            // Output satisfies (but is not identical to) the requirement: also
            // register it under the required property.
            if (!outputProperty.equals(requiredProperty)) {
                setPropertyWithCost(groupExpression, outputProperty, requiredProperty, childrenOutputProperties);
            }
        }
        if (curTotalCost < context.getUpperBoundCost()) {
            LOG.debug("Update upperBoundCost: prev={} curr={}", context.getUpperBoundCost(), curTotalCost);
            context.setUpperBoundCost(curTotalCost);
        }
    }

    /**
     * Decides whether a one-stage aggregation plan may be generated for this
     * expression. Returns false when statistics are unknown/unreliable, when a
     * distinct aggregate is present, or when there are multiple group-by keys —
     * cases where a single-stage plan is likely to be a bad choice.
     */
    private boolean canGenerateOneStageAgg(GroupExpression childBestExpr) {
        if (!OperatorType.PHYSICAL_HASH_AGG.equals(groupExpression.getOp().getOpType())) {
            return true;
        }
        int aggStage = ConnectContext.get().getSessionVariable().getNewPlannerAggStage();
        // Session explicitly forces one-stage aggregation.
        if (aggStage == 1) {
            return true;
        }
        if (childBestExpr.getOp() instanceof PhysicalDistributionOperator) {
            PhysicalDistributionOperator distributionOperator =
                    (PhysicalDistributionOperator) childBestExpr.getOp();
            // A gather-with-limit child already funnels data to one node, so a
            // one-stage aggregation on top is fine.
            if (childBestExpr.getOp().hasLimit() && distributionOperator.getDistributionSpec().getType()
                    .equals(DistributionSpec.DistributionType.GATHER)) {
                return true;
            }
        }
        PhysicalHashAggregateOperator aggregate = (PhysicalHashAggregateOperator) groupExpression.getOp();
        List<CallOperator> distinctAggCallOperator = aggregate.getAggregations().values().stream()
                .filter(CallOperator::isDistinct).collect(Collectors.toList());
        if (aggregate.getType().isGlobal() && !aggregate.isSplit()
                && childBestExpr.getOp() instanceof PhysicalDistributionOperator) {
            if (aggregate.getDistinctColumnDataSkew() != null) {
                return true;
            }
            // Unknown column stats or an unreliable row count make a one-stage
            // plan too risky.
            if (groupExpression.getGroup().getStatistics().getColumnStatistics().values().stream()
                    .anyMatch(ColumnStatistic::isUnknown)
                    || childBestExpr.getGroup().getStatistics().isTableRowCountMayInaccurate()) {
                return false;
            }
            if (distinctAggCallOperator.size() > 0) {
                return false;
            }
            return aggregate.getGroupBys().size() <= 1;
        }
        return true;
    }

    /**
     * Returns true only when every child group already has statistics; the
     * group's own statistics must then be present as well.
     */
    private boolean computeCurrentGroupStatistics() {
        if (groupExpression.getInputs().stream().anyMatch(group -> group.getStatistics() == null)) {
            return false;
        }
        Preconditions.checkNotNull(groupExpression.getGroup().getStatistics());
        return true;
    }

    /**
     * Records {@code curTotalCost} for {@code requiredProperty} on the given
     * expression, marks which output property satisfies it, and updates the
     * group's best expression. Note: the {@code groupExpression} parameter
     * shadows the field; the final line deliberately uses the field's group.
     */
    private void setPropertyWithCost(GroupExpression groupExpression,
                                     PhysicalPropertySet outputProperty,
                                     PhysicalPropertySet requiredProperty,
                                     List<PhysicalPropertySet> childrenOutputProperties) {
        if (groupExpression.updatePropertyWithCost(requiredProperty, childrenOutputProperties, curTotalCost)) {
            groupExpression.setOutputPropertySatisfyRequiredProperty(outputProperty, requiredProperty);
        }
        this.groupExpression.getGroup().setBestExpression(groupExpression, curTotalCost, requiredProperty);
    }

    /**
     * Records valid (expression, output property) pairs for plan enumeration —
     * only when the session requests the N-th execution plan feature.
     */
    private void recordPlanEnumInfo(GroupExpression groupExpression, PhysicalPropertySet outputProperty,
                                    List<PhysicalPropertySet> childrenOutputProperties) {
        if (ConnectContext.get().getSessionVariable().isSetUseNthExecPlan()) {
            groupExpression.addValidOutputPropertyGroup(outputProperty, childrenOutputProperties);
            groupExpression.getGroup().addSatisfyOutputPropertyGroupExpression(outputProperty, groupExpression);
        }
    }

    /** Convenience overload: output property equals the required property. */
    private void setPropertyWithCost(GroupExpression groupExpression,
                                     PhysicalPropertySet requiredProperty,
                                     List<PhysicalPropertySet> childrenOutputProperties) {
        setPropertyWithCost(groupExpression, requiredProperty, requiredProperty, childrenOutputProperties);
    }

    /**
     * Adds the enforcers needed to turn {@code outputProperty} into something
     * satisfying {@code requiredProperty}. Returns the property actually
     * produced, or null when nothing needed enforcing (both already satisfied).
     */
    private PhysicalPropertySet enforceProperty(PhysicalPropertySet outputProperty,
                                                PhysicalPropertySet requiredProperty) {
        boolean satisfyOrderProperty =
                outputProperty.getSortProperty().isSatisfy(requiredProperty.getSortProperty());
        boolean satisfyDistributionProperty =
                outputProperty.getDistributionProperty().isSatisfy(requiredProperty.getDistributionProperty());

        PhysicalPropertySet enforcedProperty = null;
        if (!satisfyDistributionProperty && satisfyOrderProperty) {
            if (requiredProperty.getSortProperty().isEmpty()) {
                enforcedProperty = enforceDistribute(outputProperty);
            } else {
                /*
                 * The sorting attribute does not make sense when the sort property is not empty,
                 * because after the data is redistributed, the original order requirements cannot be guaranteed.
                 * So we need to enforce "SortNode" here
                 *
                 * Because we build a parent-child relationship based on property.
                 * So here we hack to eliminate the original property to prevent an endless loop
                 * eg: [order by v1, gather] -> [order by v1, shuffle] -> [order by v1, shuffle] may endless loop,
                 * because repartition require sort again
                 */
                PhysicalPropertySet newProperty =
                        new PhysicalPropertySet(EmptyDistributionProperty.INSTANCE, EmptySortProperty.INSTANCE,
                                outputProperty.getCteProperty());
                groupExpression.getGroup().replaceBestExpressionProperty(outputProperty, newProperty,
                        groupExpression.getCost(outputProperty));
                enforcedProperty = enforceSortAndDistribute(newProperty, requiredProperty);
            }
        } else if (satisfyDistributionProperty && !satisfyOrderProperty) {
            enforcedProperty = enforceSort(outputProperty);
        } else if (!satisfyDistributionProperty) {
            enforcedProperty = enforceSortAndDistribute(outputProperty, requiredProperty);
        }
        return enforcedProperty;
    }

    /** Appends a distribution enforcer and returns the resulting property set. */
    private PhysicalPropertySet enforceDistribute(PhysicalPropertySet oldOutputProperty) {
        PhysicalPropertySet requiredPropertySet = oldOutputProperty.copy();
        requiredPropertySet.setDistributionProperty(context.getRequiredProperty()
                .getDistributionProperty().getNullStrictProperty());
        GroupExpression enforcer = requiredPropertySet.getDistributionProperty()
                .appendEnforcers(groupExpression.getGroup());
        PhysicalPropertySet newOutputProperty =
                updateCostAndOutputPropertySet(enforcer, oldOutputProperty, requiredPropertySet);
        recordPlanEnumInfo(enforcer, newOutputProperty, Lists.newArrayList(oldOutputProperty));
        return newOutputProperty;
    }

    /** Appends a sort enforcer and returns the resulting property set. */
    private PhysicalPropertySet enforceSort(PhysicalPropertySet oldOutputProperty) {
        PhysicalPropertySet newOutputProperty = oldOutputProperty.copy();
        newOutputProperty.setSortProperty(context.getRequiredProperty().getSortProperty());
        GroupExpression enforcer =
                context.getRequiredProperty().getSortProperty().appendEnforcers(groupExpression.getGroup());
        updateCostWithEnforcer(enforcer, oldOutputProperty, newOutputProperty);
        recordPlanEnumInfo(enforcer, newOutputProperty, Lists.newArrayList(oldOutputProperty));
        return newOutputProperty;
    }

    /**
     * Appends both enforcers. For a gather requirement, sorting happens before
     * the gather (merge-gather); otherwise data is redistributed first and then
     * sorted on each node.
     */
    private PhysicalPropertySet enforceSortAndDistribute(PhysicalPropertySet outputProperty,
                                                         PhysicalPropertySet requiredProperty) {
        PhysicalPropertySet enforcedProperty;
        if (requiredProperty.getDistributionProperty().getSpec()
                .equals(DistributionSpec.createGatherDistributionSpec())) {
            enforcedProperty = enforceSort(outputProperty);
            enforcedProperty = enforceDistribute(enforcedProperty);
        } else {
            enforcedProperty = enforceDistribute(outputProperty);
            enforcedProperty = enforceSort(enforcedProperty);
        }
        return enforcedProperty;
    }

    /**
     * Inserts the enforcer into the memo and records its property/cost pair.
     * NOTE(review): unlike updateCostAndOutputPropertySet below, this does not
     * add the enforcer's own cost to curTotalCost — confirm this asymmetry is
     * intentional.
     */
    private void updateCostWithEnforcer(GroupExpression enforcer,
                                        PhysicalPropertySet oldOutputProperty,
                                        PhysicalPropertySet newOutputProperty) {
        context.getOptimizerContext().getMemo().insertEnforceExpression(enforcer, groupExpression.getGroup());
        if (enforcer.updatePropertyWithCost(newOutputProperty, Lists.newArrayList(oldOutputProperty), curTotalCost)) {
            enforcer.setOutputPropertySatisfyRequiredProperty(newOutputProperty, newOutputProperty);
        }
        groupExpression.getGroup().setBestExpression(enforcer, curTotalCost, newOutputProperty);
    }

    /**
     * Inserts the enforcer into the memo, adds its cost to the running total,
     * and lets the group compute/record the resulting output property set.
     */
    private PhysicalPropertySet updateCostAndOutputPropertySet(GroupExpression enforcer,
                                                               PhysicalPropertySet oldOutputProperty,
                                                               PhysicalPropertySet requiredPropertySet) {
        context.getOptimizerContext().getMemo().insertEnforceExpression(enforcer, groupExpression.getGroup());
        curTotalCost += CostModel.calculateCost(enforcer);
        PhysicalPropertySet newOutputProperty =
                groupExpression.getGroup().updateOutputPropertySet(enforcer, curTotalCost, requiredPropertySet);
        if (enforcer.updatePropertyWithCost(newOutputProperty, Lists.newArrayList(oldOutputProperty), curTotalCost)) {
            enforcer.setOutputPropertySatisfyRequiredProperty(newOutputProperty, newOutputProperty);
        }
        return newOutputProperty;
    }
}
IMO, using a named constant for `$0092` would improve the readability of the code. It is not mandatory, though, since the value is used only here.
public Value getChildByName(String name) throws DebugVariableException { if (namedChildVariables == null) { namedChildVariables = computeChildVariables(); } if (!namedChildVariables.containsKey(name)) { for (Map.Entry<String, Value> childVariable : namedChildVariables.entrySet()) { String escaped = childVariable.getKey().replaceAll("\\$0092(\\$0092)?", "$1"); if (escaped.equals(name)) { return childVariable.getValue(); } } throw new DebugVariableException("No child variables found with name: '" + name + "'"); } return namedChildVariables.get(name); }
String escaped = childVariable.getKey().replaceAll("\\$0092(\\$0092)?", "$1");
public Value getChildByName(String name) throws DebugVariableException { if (namedChildVariables == null) { namedChildVariables = computeChildVariables(); } if (!namedChildVariables.containsKey(name)) { for (Map.Entry<String, Value> childVariable : namedChildVariables.entrySet()) { String unicodeOfSlash = "&0092"; String escaped = childVariable.getKey() .replaceAll(String.format("\\%s(\\%s)?", unicodeOfSlash, unicodeOfSlash), "$1"); if (escaped.equals(name)) { return childVariable.getValue(); } } throw new DebugVariableException("No child variables found with name: '" + name + "'"); } return namedChildVariables.get(name); }
class NamedCompoundVariable extends BCompoundVariable { private Map<String, Value> namedChildVariables; public NamedCompoundVariable(SuspendedContext context, String varName, BVariableType bVarType, Value jvmValue) { super(context, varName, bVarType, jvmValue); } /** * Retrieves JDI value representations of all the child variables, as a map of named child variables (i.e. error * variable entries, object fields, record fields). * <p> * Each compound variable type with named child variables must have their own implementation to compute/fetch * values. */ protected abstract Map<String, Value> computeChildVariables(); /** * Retrieves JDI value representations of all the child variables, as a map of named child variables (i.e. error * variable entries, object fields, record fields). * <p> */ public Map<String, Value> getNamedChildVariables() { if (namedChildVariables == null) { namedChildVariables = computeChildVariables(); } return namedChildVariables; } /** * Returns the JDI value representation of the child variable for a given name. */ @Override public Variable getDapVariable() { if (dapVariable == null) { dapVariable = new Variable(); dapVariable.setName(this.name); dapVariable.setType(this.type.getString()); dapVariable.setValue(computeValue()); dapVariable.setNamedVariables(getChildrenCount()); } return dapVariable; } }
class NamedCompoundVariable extends BCompoundVariable { private Map<String, Value> namedChildVariables; public NamedCompoundVariable(SuspendedContext context, String varName, BVariableType bVarType, Value jvmValue) { super(context, varName, bVarType, jvmValue); } /** * Retrieves JDI value representations of all the child variables, as a map of named child variables (i.e. error * variable entries, object fields, record fields). * <p> * Each compound variable type with named child variables must have their own implementation to compute/fetch * values. */ protected abstract Map<String, Value> computeChildVariables(); /** * Retrieves JDI value representations of all the child variables, as a map of named child variables (i.e. error * variable entries, object fields, record fields). */ public Map<String, Value> getNamedChildVariables() { if (namedChildVariables == null) { namedChildVariables = computeChildVariables(); } return namedChildVariables; } /** * Returns the JDI value representation of the child variable for a given name. */ @Override public Variable getDapVariable() { if (dapVariable == null) { dapVariable = new Variable(); dapVariable.setName(this.name); dapVariable.setType(this.type.getString()); dapVariable.setValue(computeValue()); dapVariable.setNamedVariables(getChildrenCount()); } return dapVariable; } }
Should this be `replayDropNode`? Replay should only focus on restoring the warehouse's internal state, not on triggering starmgr to drop the node again, because starmgr has its own edit log and can replay to its final state by itself.
public void replayDropBackend(Backend backend) { LOG.debug("replayDropBackend: {}", backend); Map<Long, Backend> copiedBackends = Maps.newHashMap(idToBackendRef); copiedBackends.remove(backend.getId()); idToBackendRef = ImmutableMap.copyOf(copiedBackends); Map<Long, AtomicLong> copiedReportVerions = Maps.newHashMap(idToReportVersionRef); copiedReportVerions.remove(backend.getId()); idToReportVersionRef = ImmutableMap.copyOf(copiedReportVerions); final Cluster cluster = GlobalStateMgr.getCurrentState().getCluster(); if (null != cluster) { cluster.removeBackend(backend.getId()); Warehouse warehouse = GlobalStateMgr.getCurrentWarehouseMgr(). getWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_NAME); if (warehouse != null) { warehouse.getAnyAvailableCluster().dropNode(backend.getId()); } if (RunMode.allowCreateLakeTable()) { long starletPort = backend.getStarletPort(); if (starletPort == 0) { return; } String workerAddr = backend.getHost() + ":" + starletPort; long workerId = GlobalStateMgr.getCurrentState().getStarOSAgent().getWorkerId(workerAddr); GlobalStateMgr.getCurrentState().getStarOSAgent().removeWorkerFromMap(workerId, workerAddr); } } else { LOG.error("Cluster {} no exist.", SystemInfoService.DEFAULT_CLUSTER); } }
warehouse.getAnyAvailableCluster().dropNode(backend.getId());
public void replayDropBackend(Backend backend) { LOG.debug("replayDropBackend: {}", backend); Map<Long, Backend> copiedBackends = Maps.newHashMap(idToBackendRef); copiedBackends.remove(backend.getId()); idToBackendRef = ImmutableMap.copyOf(copiedBackends); Map<Long, AtomicLong> copiedReportVerions = Maps.newHashMap(idToReportVersionRef); copiedReportVerions.remove(backend.getId()); idToReportVersionRef = ImmutableMap.copyOf(copiedReportVerions); dropComputeNodeFromWarehouse(backend); final Cluster cluster = GlobalStateMgr.getCurrentState().getCluster(); if (null != cluster) { cluster.removeBackend(backend.getId()); if (RunMode.allowCreateLakeTable()) { long starletPort = backend.getStarletPort(); if (starletPort == 0) { return; } String workerAddr = backend.getHost() + ":" + starletPort; long workerId = GlobalStateMgr.getCurrentState().getStarOSAgent().getWorkerId(workerAddr); GlobalStateMgr.getCurrentState().getStarOSAgent().removeWorkerFromMap(workerId, workerAddr); } } else { LOG.error("Cluster {} no exist.", SystemInfoService.DEFAULT_CLUSTER); } }
class SerializeData { @SerializedName("computeNodes") public List<ComputeNode> computeNodes; }
class SerializeData { @SerializedName("computeNodes") public List<ComputeNode> computeNodes; }
You are right — it's my mistake. I noticed this checkpointCoordinator isn't used, so I removed it. For the convenience of review, a friendly reminder: - It is restored in the first commit, and I removed the `CheckpointCoordinator checkpointCoordinator =` assignment because it's not necessary. - I did some extra work in this PR while addressing this comment: I migrated the checkpoint-coordinator-related tests to JUnit 5 in the 3rd commit. - The second and fourth commits are not updated.
private CheckpointCoordinator getCheckpointCoordinator(ExecutionGraph graph) throws Exception { return new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setAlignedCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); }
.setTimer(manuallyTriggeredScheduledExecutor)
private CheckpointCoordinator getCheckpointCoordinator(ExecutionGraph graph) throws Exception { return new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setAlignedCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); }
class CheckpointCoordinatorTest extends TestLogger { @ClassRule public static final TestExecutorResource<ScheduledExecutorService> EXECUTOR_RESOURCE = TestingUtils.defaultExecutorResource(); @Test public void testSharedStateNotDiscaredOnAbort() throws Exception { JobVertexID v1 = new JobVertexID(), v2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(v1) .addJobVertex(v2) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); coordinator.startCheckpointScheduler(); CompletableFuture<CompletedCheckpoint> cpFuture = coordinator.triggerCheckpoint(true); manuallyTriggeredScheduledExecutor.triggerAll(); cpFuture.getNow(null); TestingStreamStateHandle metaState = handle(); TestingStreamStateHandle privateState = handle(); TestingStreamStateHandle sharedState = handle(); ackCheckpoint(1L, coordinator, v1, graph, metaState, privateState, sharedState); declineCheckpoint(1L, coordinator, v2, graph); assertTrue(privateState.isDisposed()); assertTrue(metaState.isDisposed()); assertFalse(sharedState.isDisposed()); cpFuture = coordinator.triggerCheckpoint(true); manuallyTriggeredScheduledExecutor.triggerAll(); cpFuture.getNow(null); ackCheckpoint(2L, coordinator, v1, graph, handle(), handle(), handle()); ackCheckpoint(2L, coordinator, v2, graph, handle(), handle(), handle()); cpFuture.get(); assertTrue(sharedState.isDisposed()); } @Test public void testAbortedCheckpointStatsUpdatedAfterFailure() throws Exception { testReportStatsAfterFailure( 1L, (coordinator, execution, metrics) -> { coordinator.reportStats(1L, execution.getAttemptId(), metrics); return null; }); } @Test public void testCheckpointStatsUpdatedAfterFailure() throws Exception { testReportStatsAfterFailure( 1L, (coordinator, execution, metrics) -> coordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( 
execution.getVertex().getJobId(), execution.getAttemptId(), 1L, metrics, new TaskStateSnapshot()), TASK_MANAGER_LOCATION_INFO)); } private void testReportStatsAfterFailure( long checkpointId, TriFunctionWithException< CheckpointCoordinator, Execution, CheckpointMetrics, ?, CheckpointException> reportFn) throws Exception { JobVertexID decliningVertexID = new JobVertexID(); JobVertexID lateReportVertexID = new JobVertexID(); ExecutionGraph executionGraph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(decliningVertexID) .addJobVertex(lateReportVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex decliningVertex = executionGraph.getJobVertex(decliningVertexID).getTaskVertices()[0]; ExecutionVertex lateReportVertex = executionGraph.getJobVertex(lateReportVertexID).getTaskVertices()[0]; CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(statsTracker) .build(executionGraph); CompletableFuture<CompletedCheckpoint> result = coordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); checkState( coordinator.getNumberOfPendingCheckpoints() == 1, "wrong number of pending checkpoints: %s", coordinator.getNumberOfPendingCheckpoints()); if (result.isDone()) { result.get(); } coordinator.receiveDeclineMessage( new DeclineCheckpoint( executionGraph.getJobID(), decliningVertex.getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), "test"); CheckpointMetrics lateReportedMetrics = new CheckpointMetricsBuilder() .setTotalBytesPersisted(18) .setBytesPersistedOfThisCheckpoint(18) .setBytesProcessedDuringAlignment(19) .setAsyncDurationMillis(20) .setAlignmentDurationNanos(123 * 1_000_000) .setCheckpointStartDelayNanos(567 * 1_000_000) .build(); 
reportFn.apply( coordinator, lateReportVertex.getCurrentExecutionAttempt(), lateReportedMetrics); assertStatsEqual( checkpointId, lateReportVertex.getJobvertexId(), 0, lateReportedMetrics, statsTracker.createSnapshot().getHistory().getCheckpointById(checkpointId)); } private boolean hasNoSubState(OperatorState s) { return s.getNumberCollectedStates() == 0; } private void assertStatsEqual( long checkpointId, JobVertexID jobVertexID, int subtasIdx, CheckpointMetrics expected, AbstractCheckpointStats actual) { assertEquals(checkpointId, actual.getCheckpointId()); assertEquals(CheckpointStatsStatus.FAILED, actual.getStatus()); assertEquals(0, actual.getNumberOfAcknowledgedSubtasks()); assertStatsMetrics(jobVertexID, subtasIdx, expected, actual); } public static void assertStatsMetrics( JobVertexID jobVertexID, int subtasIdx, CheckpointMetrics expected, AbstractCheckpointStats actual) { assertEquals(expected.getTotalBytesPersisted(), actual.getStateSize()); SubtaskStateStats taskStats = actual.getAllTaskStateStats().stream() .filter(s -> s.getJobVertexId().equals(jobVertexID)) .findAny() .get() .getSubtaskStats()[subtasIdx]; assertEquals( expected.getAlignmentDurationNanos() / 1_000_000, taskStats.getAlignmentDuration()); assertEquals(expected.getUnalignedCheckpoint(), taskStats.getUnalignedCheckpoint()); assertEquals(expected.getAsyncDurationMillis(), taskStats.getAsyncCheckpointDuration()); assertEquals( expected.getAlignmentDurationNanos() / 1_000_000, taskStats.getAlignmentDuration()); assertEquals( expected.getCheckpointStartDelayNanos() / 1_000_000, taskStats.getCheckpointStartDelay()); } private static final String TASK_MANAGER_LOCATION_INFO = "Unknown location"; private ManuallyTriggeredScheduledExecutor manuallyTriggeredScheduledExecutor; @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); @Before public void setUp() throws Exception { manuallyTriggeredScheduledExecutor = new ManuallyTriggeredScheduledExecutor(); } @Test public void 
testScheduleTriggerRequestDuringShutdown() throws Exception { ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(); CheckpointCoordinator coordinator = getCheckpointCoordinator(new ScheduledExecutorServiceAdapter(executor)); coordinator.shutdown(); executor.shutdownNow(); coordinator.scheduleTriggerRequest(); } @Test public void testMinCheckpointPause() throws Exception { ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); CheckpointCoordinator coordinator = null; try { int pause = 1000; JobVertexID jobVertexId = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexId) .setMainThreadExecutor( ComponentMainThreadExecutorServiceAdapter .forSingleThreadExecutor( new DirectScheduledExecutorService())) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex = graph.getJobVertex(jobVertexId).getTaskVertices()[0]; ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); coordinator = new CheckpointCoordinatorBuilder() .setTimer(new ScheduledExecutorServiceAdapter(executorService)) .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointInterval(pause) .setCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(1) .setMinPauseBetweenCheckpoints(pause) .build()) .build(graph); coordinator.startCheckpointScheduler(); coordinator.triggerCheckpoint( true); coordinator.triggerCheckpoint( true); while (coordinator.getPendingCheckpoints().values().stream() .noneMatch(pc -> pc.getCheckpointStorageLocation() != null)) { Thread.sleep(10); } coordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptId, 1L), TASK_MANAGER_LOCATION_INFO); Thread.sleep(pause / 2); assertEquals(0, coordinator.getNumberOfPendingCheckpoints()); while (coordinator.getNumberOfPendingCheckpoints() == 0) { Thread.sleep(1); } } finally { if 
(coordinator != null) { coordinator.shutdown(); } executorService.shutdownNow(); } } @Test public void testCheckpointAbortsIfTriggerTasksAreNotExecuted() throws Exception { ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .addJobVertex(new JobVertexID(), false) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); checkpointCoordinator.shutdown(); } @Test public void testCheckpointAbortsIfTriggerTasksAreFinished() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); Arrays.stream(graph.getJobVertex(jobVertexID1).getTaskVertices()) .forEach(task -> task.getCurrentExecutionAttempt().markFinished()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); 
assertTrue(checkpointFuture.isCompletedExceptionally()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); checkpointCoordinator.shutdown(); } @Test public void testCheckpointTriggeredAfterSomeTasksFinishedIfAllowed() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1, 3, 256) .addJobVertex(jobVertexID2, 3, 256) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); ExecutionJobVertex jobVertex2 = graph.getJobVertex(jobVertexID2); jobVertex1.getTaskVertices()[0].getCurrentExecutionAttempt().markFinished(); jobVertex1.getTaskVertices()[1].getCurrentExecutionAttempt().markFinished(); jobVertex2.getTaskVertices()[1].getCurrentExecutionAttempt().markFinished(); CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setAllowCheckpointsAfterTasksFinished(true) .setCheckpointStatsTracker(statsTracker) .build(graph); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(checkpointFuture.isDone()); assertFalse(checkpointFuture.isCompletedExceptionally()); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); PendingCheckpoint pendingCheckpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); AbstractCheckpointStats checkpointStats = 
statsTracker .createSnapshot() .getHistory() .getCheckpointById(pendingCheckpoint.getCheckpointID()); assertEquals(3, checkpointStats.getNumberOfAcknowledgedSubtasks()); for (ExecutionVertex task : Arrays.asList( jobVertex1.getTaskVertices()[0], jobVertex1.getTaskVertices()[1], jobVertex2.getTaskVertices()[1])) { assertNotNull( checkpointStats.getTaskStateStats(task.getJobvertexId()) .getSubtaskStats()[task.getParallelSubtaskIndex()]); } } @Test public void testTasksFinishDuringTriggering() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .setTransitToRunning(false) .addJobVertex(jobVertexID1, 1, 256) .addJobVertex(jobVertexID2, 1, 256) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); ExecutionVertex taskVertex = jobVertex1.getTaskVertices()[0]; ExecutionJobVertex jobVertex2 = graph.getJobVertex(jobVertexID2); ExecutionVertex taskVertex2 = jobVertex2.getTaskVertices()[0]; AtomicBoolean checkpointAborted = new AtomicBoolean(false); LogicalSlot slot1 = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public CompletableFuture<Acknowledge> triggerCheckpoint( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long timestamp, CheckpointOptions checkpointOptions) { taskVertex.getCurrentExecutionAttempt().markFinished(); return FutureUtils.completedExceptionally( new RpcException("")); } }) .createTestingLogicalSlot(); LogicalSlot slot2 = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public void notifyCheckpointAborted( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long latestCompletedCheckpointId, long timestamp) { checkpointAborted.set(true); } }) .createTestingLogicalSlot(); 
// Continuation of testTasksFinishDuringTriggering: assign the slots, move both tasks
// to RUNNING, then trigger a checkpoint whose trigger RPC fails mid-flight.
ExecutionGraphTestUtils.setVertexResource(taskVertex, slot1);
taskVertex.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);
ExecutionGraphTestUtils.setVertexResource(taskVertex2, slot2);
taskVertex2.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);

CheckpointCoordinator checkpointCoordinator =
        new CheckpointCoordinatorBuilder()
                .setTimer(manuallyTriggeredScheduledExecutor)
                .setAllowCheckpointsAfterTasksFinished(true)
                .build(graph);

assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

final CompletableFuture<CompletedCheckpoint> checkpointFuture =
        checkpointCoordinator.triggerCheckpoint(false);
manuallyTriggeredScheduledExecutor.triggerAll();

// The failed trigger RPC aborts the checkpoint, and the other task is notified.
assertTrue(checkpointFuture.isCompletedExceptionally());
assertTrue(checkpointAborted.get());
}

/**
 * Verifies that a decline message which exhausts the failure manager's tolerance makes
 * the failure callback's exception propagate to the caller of receiveDeclineMessage.
 */
@Test
public void testTriggerAndDeclineCheckpointThenFailureManagerThrowsException() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    final ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    final ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    final String errorMsg = "Exceeded checkpoint failure tolerance number!";
    CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg);
    CheckpointCoordinator checkpointCoordinator =
            getCheckpointCoordinator(graph, checkpointFailureManager);

    try {
        final CompletableFuture<CompletedCheckpoint> checkPointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkPointFuture);

        long checkpointId =
                checkpointCoordinator
                        .getPendingCheckpoints()
                        .entrySet()
                        .iterator()
                        .next()
                        .getKey();
        PendingCheckpoint checkpoint =
                checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

        // Only one of the two tasks acknowledges; the checkpoint stays incomplete.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
                TASK_MANAGER_LOCATION_INFO);

        assertFalse(checkpoint.isDisposed());
        assertFalse(checkpoint.areTasksFullyAcknowledged());

        // The decline exceeds the tolerance, so the failure callback throws here.
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpointId,
                        new CheckpointException(CHECKPOINT_DECLINED)),
                TASK_MANAGER_LOCATION_INFO);
        fail("Test failed.");
    } catch (Exception e) {
        ExceptionUtils.assertThrowableWithMessage(e, errorMsg);
    } finally {
        checkpointCoordinator.shutdown();
    }
}

/**
 * Verifies that aborting pending checkpoints for an IO_EXCEPTION reason counts against
 * the failure tolerance and surfaces the failure callback's error.
 */
@Test
public void testIOExceptionCheckpointExceedsTolerableFailureNumber() throws Exception {
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(new JobVertexID())
                    .addJobVertex(new JobVertexID())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final String expectedErrorMessage = "Expected Error Message";
    CheckpointFailureManager checkpointFailureManager =
            getCheckpointFailureManager(expectedErrorMessage);
    CheckpointCoordinator checkpointCoordinator =
            getCheckpointCoordinator(graph, checkpointFailureManager);

    try {
        checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        checkpointCoordinator.abortPendingCheckpoints(new CheckpointException(IO_EXCEPTION));
        fail("Test failed.");
    } catch (Exception e) {
        ExceptionUtils.assertThrowableWithMessage(e, expectedErrorMessage);
    } finally {
        checkpointCoordinator.shutdown();
    }
}

/**
 * Verifies that periodic checkpoint triggering fails with the IO_EXCEPTION failure
 * reason when the checkpoint storage throws while some tasks are inactive.
 */
@Test
public void testIOExceptionForPeriodicSchedulingWithInactiveTasks() throws Exception {
    CheckpointCoordinator checkpointCoordinator
// (continued) coordinator built over an IOException-throwing checkpoint storage.
= setupCheckpointCoordinatorWithInactiveTasks(new IOExceptionCheckpointStorage());

final CompletableFuture<CompletedCheckpoint> onCompletionPromise =
        checkpointCoordinator.triggerCheckpoint(
                CheckpointProperties.forCheckpoint(
                        CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                null,
                true);
manuallyTriggeredScheduledExecutor.triggerAll();
try {
    onCompletionPromise.get();
    fail("should not trigger periodic checkpoint after IOException occurred.");
} catch (Exception e) {
    // Only an IO_EXCEPTION checkpoint failure is acceptable; rethrow anything else.
    final Optional<CheckpointException> checkpointExceptionOptional =
            ExceptionUtils.findThrowable(e, CheckpointException.class);
    if (!checkpointExceptionOptional.isPresent()
            || checkpointExceptionOptional.get().getCheckpointFailureReason()
                    != IO_EXCEPTION) {
        throw e;
    }
}
}

/** Tests that do not trigger checkpoint when IOException occurred. */
@Test
public void testTriggerCheckpointAfterCheckpointStorageIOException() throws Exception {
    TestFailJobCallback failureCallback = new TestFailJobCallback();
    CheckpointStatsTracker statsTracker =
            new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup());
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointStatsTracker(statsTracker)
                    .setFailureManager(new CheckpointFailureManager(0, failureCallback))
                    .setCheckpointStorage(new IOExceptionCheckpointStorage())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    testTriggerCheckpoint(checkpointCoordinator, IO_EXCEPTION);
    // The zero-tolerance failure manager must fire exactly once, and the stats
    // tracker must still have recorded the pending checkpoint.
    assertEquals(1, failureCallback.getInvokeCounter());
    assertNotNull(statsTracker.getPendingCheckpointStats(1));
}

/**
 * Verifies that the checkpoint aborts when the trigger tasks are already finished and
 * the checkpoint storage throws an IOException.
 */
@Test
public void testCheckpointAbortsIfTriggerTasksAreFinishedAndIOException() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2, false)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointStorage(new IOExceptionCheckpointStorage())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(graph);

    // All trigger-side subtasks report FINISHED before the checkpoint starts.
    Arrays.stream(graph.getJobVertex(jobVertexID1).getTaskVertices())
            .forEach(task -> task.getCurrentExecutionAttempt().markFinished());

    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

    checkpointCoordinator.startCheckpointScheduler();
    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();

    assertTrue(checkpointFuture.isCompletedExceptionally());
    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    checkpointCoordinator.shutdown();
}

/**
 * Verifies that expiring pending checkpoints exceeds the failure tolerance and the
 * failure callback's error is surfaced to the caller.
 */
@Test
public void testExpiredCheckpointExceedsTolerableFailureNumber() throws Exception {
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(new JobVertexID())
                    .addJobVertex(new JobVertexID())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final String errorMsg = "Exceeded checkpoint failure tolerance number!";
    CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg);
    CheckpointCoordinator checkpointCoordinator =
            getCheckpointCoordinator(graph, checkpointFailureManager);

    try {
        checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        checkpointCoordinator.abortPendingCheckpoints(
                new CheckpointException(CHECKPOINT_EXPIRED));
        fail("Test failed.");
    } catch (Exception e) {
        ExceptionUtils.assertThrowableWithMessage(e, errorMsg);
    } finally {
        checkpointCoordinator.shutdown();
    }
}

/** Declines a checkpoint with a synchronous failure reason. */
@Test
public void testTriggerAndDeclineSyncCheckpointFailureSimple() throws Exception {
testTriggerAndDeclineCheckpointSimple(CHECKPOINT_DECLINED);
}

/** Declines a checkpoint with an asynchronous failure reason. */
@Test
public void testTriggerAndDeclineAsyncCheckpointFailureSimple() throws Exception {
    testTriggerAndDeclineCheckpointSimple(CHECKPOINT_ASYNC_EXCEPTION);
}

/**
 * This test triggers a checkpoint and then sends a decline checkpoint message from one of the
 * tasks. The expected behaviour is that said checkpoint is discarded and a new checkpoint is
 * triggered.
 */
private void testTriggerAndDeclineCheckpointSimple(
        CheckpointFailureReason checkpointFailureReason) throws Exception {
    final CheckpointException checkpointException =
            new CheckpointException(checkpointFailureReason);

    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    TestFailJobCallback failJobCallback = new TestFailJobCallback();
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointCoordinatorConfiguration(
                            CheckpointCoordinatorConfiguration.builder()
                                    .setAlignedCheckpointTimeout(Long.MAX_VALUE)
                                    .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                    .build())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .setCheckpointFailureManager(
                            new CheckpointFailureManager(0, failJobCallback))
                    .build(graph);

    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

    // Trigger a checkpoint; it stays pending with both tasks unacknowledged.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

    assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(1, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    long checkpointId =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    PendingCheckpoint checkpoint =
            checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

    assertNotNull(checkpoint);
    assertEquals(checkpointId, checkpoint.getCheckpointId());
    assertEquals(graph.getJobID(), checkpoint.getJobId());
    assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks());
    assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks());
    assertEquals(0, checkpoint.getOperatorStates().size());
    assertFalse(checkpoint.isDisposed());
    assertFalse(checkpoint.areTasksFullyAcknowledged());

    // Both tasks must have received exactly one trigger with matching metadata.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        CheckpointCoordinatorTestingUtils.TriggeredCheckpoint triggeredCheckpoint =
                gateway.getOnlyTriggeredCheckpoint(
                        vertex.getCurrentExecutionAttempt().getAttemptId());
        assertEquals(checkpointId, triggeredCheckpoint.checkpointId);
        assertEquals(checkpoint.getCheckpointTimestamp(), triggeredCheckpoint.timestamp);
        assertEquals(
                CheckpointOptions.forCheckpointWithDefaultLocation(),
                triggeredCheckpoint.checkpointOptions);
    }

    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
            "Unknown location");
    assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks());
    assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks());
    assertFalse(checkpoint.isDisposed());
    assertFalse(checkpoint.areTasksFullyAcknowledged());

    // A duplicate acknowledge from the same task must have no effect.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
            "Unknown location");
    assertFalse(checkpoint.isDisposed());
    assertFalse(checkpoint.areTasksFullyAcknowledged());

    // The decline from the other task disposes the pending checkpoint.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(), attemptID1, checkpointId, checkpointException),
            TASK_MANAGER_LOCATION_INFO);
    assertTrue(checkpoint.isDisposed());
    assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());
    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

    // Further declines for the already-disposed checkpoint must be tolerated.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(), attemptID1, checkpointId, checkpointException),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(), attemptID2, checkpointId, checkpointException),
            TASK_MANAGER_LOCATION_INFO);
    assertTrue(checkpoint.isDisposed());
    assertEquals(1, failJobCallback.getInvokeCounter());

    checkpointCoordinator.shutdown();
}

/**
 * This test triggers two checkpoints and then sends a decline message from one of the tasks for
 * the first checkpoint. This should discard the first checkpoint while not triggering a new
 * checkpoint because a later checkpoint is already in progress.
*/
@Test
public void testTriggerAndDeclineCheckpointComplex() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph);

    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    // Trigger two concurrent checkpoints.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);
    final CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);

    assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(2, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    Iterator<Map.Entry<Long, PendingCheckpoint>> it =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator();
    long checkpoint1Id = it.next().getKey();
    long checkpoint2Id = it.next().getKey();
    PendingCheckpoint checkpoint1 =
            checkpointCoordinator.getPendingCheckpoints().get(checkpoint1Id);
    PendingCheckpoint checkpoint2 =
            checkpointCoordinator.getPendingCheckpoints().get(checkpoint2Id);

    assertNotNull(checkpoint1);
    assertEquals(checkpoint1Id, checkpoint1.getCheckpointId());
    assertEquals(graph.getJobID(), checkpoint1.getJobId());
    assertEquals(2, checkpoint1.getNumberOfNonAcknowledgedTasks());
    assertEquals(0, checkpoint1.getNumberOfAcknowledgedTasks());
    assertEquals(0, checkpoint1.getOperatorStates().size());
    assertFalse(checkpoint1.isDisposed());
    assertFalse(checkpoint1.areTasksFullyAcknowledged());

    assertNotNull(checkpoint2);
    assertEquals(checkpoint2Id, checkpoint2.getCheckpointId());
    assertEquals(graph.getJobID(), checkpoint2.getJobId());
    assertEquals(2, checkpoint2.getNumberOfNonAcknowledgedTasks());
    assertEquals(0, checkpoint2.getNumberOfAcknowledgedTasks());
    assertEquals(0, checkpoint2.getOperatorStates().size());
    assertFalse(checkpoint2.isDisposed());
    assertFalse(checkpoint2.areTasksFullyAcknowledged());

    // Each task must have been triggered for both checkpoints, in order.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        List<CheckpointCoordinatorTestingUtils.TriggeredCheckpoint> triggeredCheckpoints =
                gateway.getTriggeredCheckpoints(
                        vertex.getCurrentExecutionAttempt().getAttemptId());
        assertEquals(2, triggeredCheckpoints.size());
        assertEquals(checkpoint1Id, triggeredCheckpoints.get(0).checkpointId);
        assertEquals(checkpoint2Id, triggeredCheckpoints.get(1).checkpointId);
    }

    // Decline the first checkpoint; only it is aborted, and both tasks are notified.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(),
                    attemptID1,
                    checkpoint1Id,
                    new CheckpointException(CHECKPOINT_DECLINED)),
            TASK_MANAGER_LOCATION_INFO);
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        assertEquals(
                checkpoint1Id,
                gateway.getOnlyNotifiedAbortedCheckpoint(
                                vertex.getCurrentExecutionAttempt().getAttemptId())
                        .checkpointId);
    }

    assertTrue(checkpoint1.isDisposed());
    // No replacement checkpoint is triggered: the second one is still in progress.
    assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(1, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    long checkpointIdNew =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    PendingCheckpoint checkpointNew =
            checkpointCoordinator.getPendingCheckpoints().get(checkpointIdNew);
    assertEquals(checkpoint2Id, checkpointIdNew);

    assertNotNull(checkpointNew);
    assertEquals(checkpointIdNew, checkpointNew.getCheckpointId());
    assertEquals(graph.getJobID(), checkpointNew.getJobId());
    assertEquals(2, checkpointNew.getNumberOfNonAcknowledgedTasks());
    assertEquals(0, checkpointNew.getNumberOfAcknowledgedTasks());
    assertEquals(0, checkpointNew.getOperatorStates().size());
    assertFalse(checkpointNew.isDisposed());
    assertFalse(checkpointNew.areTasksFullyAcknowledged());
    assertNotEquals(checkpoint1.getCheckpointId(), checkpointNew.getCheckpointId());

    // Late/duplicate declines for the already-disposed checkpoint are ignored and do
    // not cause additional abort notifications.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(),
                    attemptID1,
                    checkpoint1Id,
                    new CheckpointException(CHECKPOINT_DECLINED)),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(),
                    attemptID2,
                    checkpoint1Id,
                    new CheckpointException(CHECKPOINT_DECLINED)),
            TASK_MANAGER_LOCATION_INFO);
    assertTrue(checkpoint1.isDisposed());

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        assertEquals(
                1,
                gateway.getNotifiedAbortedCheckpoints(
                                vertex.getCurrentExecutionAttempt().getAttemptId())
                        .size());
    }

    checkpointCoordinator.shutdown();
}

/** Triggers a checkpoint, acknowledges it from both tasks, and verifies completion. */
@Test
public void testTriggerAndConfirmSimpleCheckpoint() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new
CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph);

    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

    assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(1, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    long checkpointId =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    PendingCheckpoint checkpoint =
            checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

    assertNotNull(checkpoint);
    assertEquals(checkpointId, checkpoint.getCheckpointId());
    assertEquals(graph.getJobID(), checkpoint.getJobId());
    assertEquals(2, checkpoint.getNumberOfNonAcknowledgedTasks());
    assertEquals(0, checkpoint.getNumberOfAcknowledgedTasks());
    assertEquals(0, checkpoint.getOperatorStates().size());
    assertFalse(checkpoint.isDisposed());
    assertFalse(checkpoint.areTasksFullyAcknowledged());

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(checkpointId, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
    }

    OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();
    OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();
    OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class);
    OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class);
    TaskStateSnapshot taskOperatorSubtaskStates1 =
            new TaskStateSnapshot(singletonMap(opID1, subtaskState1));
    TaskStateSnapshot taskOperatorSubtaskStates2 =
            new TaskStateSnapshot(singletonMap(opID2, subtaskState2));

    // First acknowledge: one of two tasks done, shared state registered once.
    AcknowledgeCheckpoint acknowledgeCheckpoint1 =
            new AcknowledgeCheckpoint(
                    graph.getJobID(),
                    attemptID2,
                    checkpointId,
                    new CheckpointMetrics(),
                    taskOperatorSubtaskStates2);
    checkpointCoordinator.receiveAcknowledgeMessage(
            acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
    assertEquals(1, checkpoint.getNumberOfAcknowledgedTasks());
    assertEquals(1, checkpoint.getNumberOfNonAcknowledgedTasks());
    assertFalse(checkpoint.isDisposed());
    assertFalse(checkpoint.areTasksFullyAcknowledged());
    verify(subtaskState2, times(1))
            .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));

    // Duplicate acknowledge: ignored for progress, but shared state registered again.
    checkpointCoordinator.receiveAcknowledgeMessage(
            acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
    assertFalse(checkpoint.isDisposed());
    assertFalse(checkpoint.areTasksFullyAcknowledged());
    verify(subtaskState2, times(2))
            .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));

    // Final acknowledge completes the checkpoint.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(
                    graph.getJobID(),
                    attemptID1,
                    checkpointId,
                    new CheckpointMetrics(),
                    taskOperatorSubtaskStates1),
            TASK_MANAGER_LOCATION_INFO);

    assertTrue(checkpoint.isDisposed());
    assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    {
        verify(subtaskState1, times(1))
                .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));
        verify(subtaskState2, times(2))
                .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));
    }

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(
                checkpointId,
                gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
    }

    CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0);
    assertEquals(graph.getJobID(), success.getJobId());
    assertEquals(checkpoint.getCheckpointId(), success.getCheckpointID());
    assertEquals(2, success.getOperatorStates().size());

    // Second round: a fresh checkpoint subsumes the first in the (size-1) store.
    gateway.resetCount();
    checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();

    long checkpointIdNew =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointIdNew),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointIdNew),
            TASK_MANAGER_LOCATION_INFO);

    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size());

    CompletedCheckpoint successNew = checkpointCoordinator.getSuccessfulCheckpoints().get(0);
    assertEquals(graph.getJobID(), successNew.getJobId());
    assertEquals(checkpointIdNew, successNew.getCheckpointID());
    assertEquals(2, successNew.getOperatorStates().size());
    assertTrue(successNew.getOperatorStates().values().stream().allMatch(this::hasNoSubState));

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(
                checkpointIdNew, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
        assertEquals(
                checkpointIdNew,
                gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
    }

    checkpointCoordinator.shutdown();
}

/** Runs two concurrent checkpoints to completion and checks both are retained. */
@Test
public void testMultipleConcurrentCheckpoints() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    JobVertexID jobVertexID3 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .addJobVertex(jobVertexID3, false)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
    ExecutionVertex vertex3 = graph.getJobVertex(jobVertexID3).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID3 = vertex3.getCurrentExecutionAttempt().getAttemptId();

    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointCoordinatorConfiguration(
                            CheckpointCoordinatorConfiguration.builder()
                                    .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                    .build())
                    .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(graph);

    assertEquals(
            0,
checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

    // Trigger the first checkpoint; both trigger tasks receive it.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

    assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

    PendingCheckpoint pending1 =
            checkpointCoordinator.getPendingCheckpoints().values().iterator().next();
    long checkpointId1 = pending1.getCheckpointId();

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(checkpointId1, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
    }

    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId1),
            TASK_MANAGER_LOCATION_INFO);

    // Trigger the second checkpoint while the first one is still pending.
    gateway.resetCount();
    final CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);

    assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());

    // Identify the newly added pending checkpoint (iteration order is not keyed).
    PendingCheckpoint pending2;
    {
        Iterator<PendingCheckpoint> all =
                checkpointCoordinator.getPendingCheckpoints().values().iterator();
        PendingCheckpoint cc1 = all.next();
        PendingCheckpoint cc2 = all.next();
        pending2 = pending1 == cc1 ? cc2 : cc1;
    }
    long checkpointId2 = pending2.getCheckpointId();

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(checkpointId2, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId);
    }

    // Interleave acknowledgements for both checkpoints; checkpoint 1 completes first.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID3, checkpointId1),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId2),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId1),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId2),
            TASK_MANAGER_LOCATION_INFO);

    assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertTrue(pending1.isDisposed());

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(
                checkpointId1,
                gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
    }

    // The last acknowledge completes checkpoint 2 as well.
    gateway.resetCount();
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID3, checkpointId2),
            TASK_MANAGER_LOCATION_INFO);

    assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints());
    assertEquals(2, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints());
    assertTrue(pending2.isDisposed());

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertEquals(
                checkpointId2,
                gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId);
    }

    // Both completed checkpoints are retained in order and carry empty substate.
    List<CompletedCheckpoint> scs = checkpointCoordinator.getSuccessfulCheckpoints();
    CompletedCheckpoint sc1 = scs.get(0);
    assertEquals(checkpointId1, sc1.getCheckpointID());
    assertEquals(graph.getJobID(), sc1.getJobId());
    assertEquals(3, sc1.getOperatorStates().size());
    assertTrue(sc1.getOperatorStates().values().stream().allMatch(this::hasNoSubState));

    CompletedCheckpoint sc2 = scs.get(1);
    assertEquals(checkpointId2, sc2.getCheckpointID());
    assertEquals(graph.getJobID(), sc2.getJobId());
    assertEquals(3, sc2.getOperatorStates().size());
    assertTrue(sc2.getOperatorStates().values().stream().allMatch(this::hasNoSubState));

    checkpointCoordinator.shutdown();
}

/**
 * Runs two concurrent checkpoints with reported state where the first one never fully
 * completes, verifying the successful later checkpoint subsumes it.
 */
@Test
public void testSuccessfulCheckpointSubsumesUnsuccessful() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    JobVertexID jobVertexID3 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .addJobVertex(jobVertexID3, false)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
    ExecutionVertex vertex3 = graph.getJobVertex(jobVertexID3).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID3 = vertex3.getCurrentExecutionAttempt().getAttemptId();

    final StandaloneCompletedCheckpointStore completedCheckpointStore =
            new StandaloneCompletedCheckpointStore(10);
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
.setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setCompletedCheckpointStore(completedCheckpointStore) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture1); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending1 = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointId(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId1, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID3 = vertex3.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates11 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates12 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates13 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState11 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState12 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState13 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates11.putSubtaskStateByOperatorID(opID1, subtaskState11); 
taskOperatorSubtaskStates12.putSubtaskStateByOperatorID(opID2, subtaskState12); taskOperatorSubtaskStates13.putSubtaskStateByOperatorID(opID3, subtaskState13); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates12), TASK_MANAGER_LOCATION_INFO); gateway.resetCount(); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture2); assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = checkpointCoordinator.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? cc2 : cc1; } long checkpointId2 = pending2.getCheckpointId(); TaskStateSnapshot taskOperatorSubtaskStates21 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates22 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates23 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState21 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState22 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState23 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates21.putSubtaskStateByOperatorID(opID1, subtaskState21); taskOperatorSubtaskStates22.putSubtaskStateByOperatorID(opID2, subtaskState22); taskOperatorSubtaskStates23.putSubtaskStateByOperatorID(opID3, subtaskState23); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId2, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } 
checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID3, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates23), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates21), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates11), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates22), TASK_MANAGER_LOCATION_INFO); assertTrue(pending1.isDisposed()); assertTrue(pending2.isDisposed()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); verify(subtaskState11, times(1)).discardState(); verify(subtaskState12, times(1)).discardState(); verify(subtaskState21, never()).discardState(); verify(subtaskState22, never()).discardState(); verify(subtaskState23, never()).discardState(); List<CompletedCheckpoint> scs = checkpointCoordinator.getSuccessfulCheckpoints(); CompletedCheckpoint success = scs.get(0); assertEquals(checkpointId2, success.getCheckpointID()); assertEquals(graph.getJobID(), success.getJobId()); assertEquals(3, success.getOperatorStates().size()); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals( checkpointId2, gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId); } checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID3, checkpointId1, new CheckpointMetrics(), 
taskOperatorSubtaskStates13), TASK_MANAGER_LOCATION_INFO); verify(subtaskState13, times(1)).discardState(); checkpointCoordinator.shutdown(); completedCheckpointStore.shutdown(JobStatus.FINISHED, new CheckpointsCleaner()); verify(subtaskState21, times(1)).discardState(); verify(subtaskState22, times(1)).discardState(); verify(subtaskState23, times(1)).discardState(); } @Test public void testCheckpointTimeoutIsolated() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); PendingCheckpoint checkpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); assertFalse(checkpoint.isDisposed()); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = spy(new TaskStateSnapshot()); 
OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpoint.getCheckpointId(), new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); manuallyTriggeredScheduledExecutor.triggerScheduledTasks(); assertTrue("Checkpoint was not canceled by the timeout", checkpoint.isDisposed()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); verify(subtaskState1, times(1)).discardState(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(0, gateway.getNotifiedCompletedCheckpoints(attemptId).size()); } checkpointCoordinator.shutdown(); } @Test public void testHandleMessagesForNonExistingCheckpoints() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); final CompletableFuture<CompletedCheckpoint> checkpointFuture = 
checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); long checkpointId = checkpointCoordinator.getPendingCheckpoints().keySet().iterator().next(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(new JobID(), attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 1L), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), createExecutionAttemptId(), checkpointId), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.shutdown(); } /** * Tests that late acknowledge checkpoint messages are properly cleaned up. Furthermore it tests * that unknown checkpoint messages for the same job a are cleaned up as well. In contrast * checkpointing messages from other jobs should not be touched. A late acknowledge message is * an acknowledge message which arrives after the checkpoint has been declined. 
* * @throws Exception */ @Test public void testStateCleanupForLateOrUnknownMessages() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); PendingCheckpoint pendingCheckpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); long checkpointId = pendingCheckpoint.getCheckpointId(); OperatorID opIDtrigger = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStatesTrigger = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskStateTrigger = 
mock(OperatorSubtaskState.class); taskOperatorSubtaskStatesTrigger.putSubtaskStateByOperatorID( opIDtrigger, subtaskStateTrigger); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStatesTrigger), TASK_MANAGER_LOCATION_INFO); verify(subtaskStateTrigger, never()).discardState(); TaskStateSnapshot unknownSubtaskState = mock(TaskStateSnapshot.class); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), createExecutionAttemptId(), checkpointId, new CheckpointMetrics(), unknownSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(unknownSubtaskState, times(1)).discardState(); TaskStateSnapshot differentJobSubtaskState = mock(TaskStateSnapshot.class); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( new JobID(), createExecutionAttemptId(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot triggerSubtaskState = mock(TaskStateSnapshot.class); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), triggerSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(triggerSubtaskState, never()).discardState(); reset(subtaskStateTrigger); checkpointCoordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), TASK_MANAGER_LOCATION_INFO); assertTrue(pendingCheckpoint.isDisposed()); verify(subtaskStateTrigger, times(1)).discardState(); TaskStateSnapshot ackSubtaskState = mock(TaskStateSnapshot.class); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), ackSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(ackSubtaskState, 
times(1)).discardState(); reset(differentJobSubtaskState); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( new JobID(), createExecutionAttemptId(), checkpointId, new CheckpointMetrics(), differentJobSubtaskState), TASK_MANAGER_LOCATION_INFO); verify(differentJobSubtaskState, never()).discardState(); TaskStateSnapshot unknownSubtaskState2 = mock(TaskStateSnapshot.class); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), createExecutionAttemptId(), checkpointId, new CheckpointMetrics(), unknownSubtaskState2), TASK_MANAGER_LOCATION_INFO); verify(unknownSubtaskState2, times(1)).discardState(); } @Test public void testMaxConcurrentAttempts1() { testMaxConcurrentAttempts(1); } @Test public void testMaxConcurrentAttempts2() { testMaxConcurrentAttempts(2); } @Test public void testMaxConcurrentAttempts5() { testMaxConcurrentAttempts(5); } @Test public void testTriggerAndConfirmSimpleSavepoint() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() 
.setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setAlignedCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(statsTracker) .build(graph); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture = checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); assertFalse(savepointFuture.isDone()); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); long checkpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint pending = checkpointCoordinator.getPendingCheckpoints().get(checkpointId); assertNotNull(pending); assertEquals(checkpointId, pending.getCheckpointId()); assertEquals(graph.getJobID(), pending.getJobId()); assertEquals(2, pending.getNumberOfNonAcknowledgedTasks()); assertEquals(0, pending.getNumberOfAcknowledgedTasks()); assertEquals(0, pending.getOperatorStates().size()); assertFalse(pending.isDisposed()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(pending.canBeSubsumed()); OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(singletonMap(opID1, subtaskState1)); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(singletonMap(opID2, subtaskState2)); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new 
AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertEquals(1, pending.getNumberOfAcknowledgedTasks()); assertEquals(1, pending.getNumberOfNonAcknowledgedTasks()); assertFalse(pending.isDisposed()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertFalse(pending.isDisposed()); assertFalse(pending.areTasksFullyAcknowledged()); assertFalse(savepointFuture.isDone()); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); assertTrue(pending.isDisposed()); assertNotNull(savepointFuture.get()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); assertThat(gateway.getNotifiedCompletedCheckpoints(attemptId)).isEmpty(); } CompletedCheckpoint success = savepointFuture.get(); assertEquals(graph.getJobID(), success.getJobId()); assertEquals(pending.getCheckpointId(), success.getCheckpointID()); assertEquals(2, success.getOperatorStates().size()); AbstractCheckpointStats actualStats = statsTracker.createSnapshot().getHistory().getCheckpointById(checkpointId); assertEquals(checkpointId, actualStats.getCheckpointId()); assertEquals(CheckpointStatsStatus.COMPLETED, actualStats.getStatus()); checkpointCoordinator.shutdown(); } /** * Triggers a savepoint and two checkpoints. 
The second checkpoint completes and subsumes the * first checkpoint, but not the first savepoint. Then we trigger another checkpoint and * savepoint. The 2nd savepoint completes and subsumes the last checkpoint, but not the first * savepoint. */ @Test public void testSavepointsAreNotSubsumed() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); StandaloneCheckpointIDCounter counter = new StandaloneCheckpointIDCounter(); CheckpointCoordinator checkpointCoordinator = spy( new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setCheckpointIDCounter(counter) .setCompletedCheckpointStore( new StandaloneCompletedCheckpointStore(1)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph)); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture1 = checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); long savepointId1 = counter.getLast(); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); CompletableFuture<CompletedCheckpoint> checkpointFuture1 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints()); 
FutureUtils.throwIfCompletedExceptionally(checkpointFuture1); CompletableFuture<CompletedCheckpoint> checkpointFuture2 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture2); long checkpointId2 = counter.getLast(); assertEquals(3, checkpointCoordinator.getNumberOfPendingCheckpoints()); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId2), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId2), TASK_MANAGER_LOCATION_INFO); verify(checkpointCoordinator, times(1)) .sendAcknowledgeMessages( anyList(), eq(checkpointId2), anyLong(), eq(INVALID_CHECKPOINT_ID)); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertFalse(checkpointCoordinator.getPendingCheckpoints().get(savepointId1).isDisposed()); assertFalse(savepointFuture1.isDone()); CompletableFuture<CompletedCheckpoint> checkpointFuture3 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture3); assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints()); CompletableFuture<CompletedCheckpoint> savepointFuture2 = checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); long savepointId2 = counter.getLast(); FutureUtils.throwIfCompletedExceptionally(savepointFuture2); assertEquals(3, checkpointCoordinator.getNumberOfPendingCheckpoints()); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, savepointId2), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new 
AcknowledgeCheckpoint(graph.getJobID(), attemptID2, savepointId2), TASK_MANAGER_LOCATION_INFO); verify(checkpointCoordinator, times(0)) .sendAcknowledgeMessages(anyList(), eq(savepointId2), anyLong(), anyLong()); assertEquals(2, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertFalse(checkpointCoordinator.getPendingCheckpoints().get(savepointId1).isDisposed()); assertFalse(savepointFuture1.isDone()); assertNotNull(savepointFuture2.get()); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, savepointId1), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, savepointId1), TASK_MANAGER_LOCATION_INFO); verify(checkpointCoordinator, times(0)) .sendAcknowledgeMessages(anyList(), eq(savepointId1), anyLong(), anyLong()); assertEquals(1, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertNotNull(savepointFuture1.get()); CompletableFuture<CompletedCheckpoint> checkpointFuture4 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture4); long checkpointId4 = counter.getLast(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId4), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId4), TASK_MANAGER_LOCATION_INFO); verify(checkpointCoordinator, times(1)) .sendAcknowledgeMessages( anyList(), eq(checkpointId4), anyLong(), eq(checkpointId2)); } private void testMaxConcurrentAttempts(int maxConcurrentAttempts) { try { JobVertexID jobVertexID1 = new JobVertexID(); 
CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration .CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); checkpointCoordinator.startCheckpointScheduler(); for (int i = 0; i < maxConcurrentAttempts; i++) { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } assertEquals(maxConcurrentAttempts, gateway.getTriggeredCheckpoints(attemptID1).size()); assertEquals(0, gateway.getNotifiedCompletedCheckpoints(attemptID1).size()); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 1L), TASK_MANAGER_LOCATION_INFO); final Collection<ScheduledFuture<?>> periodicScheduledTasks = manuallyTriggeredScheduledExecutor.getActivePeriodicScheduledTask(); assertEquals(1, periodicScheduledTasks.size()); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals( maxConcurrentAttempts + 1, gateway.getTriggeredCheckpoints(attemptID1).size()); 
manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals( maxConcurrentAttempts + 1, gateway.getTriggeredCheckpoints(attemptID1).size()); checkpointCoordinator.shutdown(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testMaxConcurrentAttemptsWithSubsumption() throws Exception { final int maxConcurrentAttempts = 2; JobVertexID jobVertexID1 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); checkpointCoordinator.startCheckpointScheduler(); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (checkpointCoordinator.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertEquals(maxConcurrentAttempts, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(1L)); assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(2L)); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 2L), TASK_MANAGER_LOCATION_INFO); do { 
manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (checkpointCoordinator.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertEquals(maxConcurrentAttempts, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(3L)); assertNotNull(checkpointCoordinator.getPendingCheckpoints().get(4L)); checkpointCoordinator.shutdown(); } @Test public void testPeriodicSchedulingWithInactiveTasks() throws Exception { CheckpointCoordinator checkpointCoordinator = setupCheckpointCoordinatorWithInactiveTasks(new MemoryStateBackend()); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertTrue(checkpointCoordinator.getNumberOfPendingCheckpoints() > 0); } private CheckpointCoordinator setupCheckpointCoordinatorWithInactiveTasks( CheckpointStorage checkpointStorage) throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0) .setMaxConcurrentCheckpoints(2) .build(); CheckpointIDCounterWithOwner checkpointIDCounter = new CheckpointIDCounterWithOwner(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setCheckpointStorage(checkpointStorage) .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointIDCounter(checkpointIDCounter) 
.build(graph); checkpointIDCounter.setOwner(checkpointCoordinator); checkpointCoordinator.startCheckpointScheduler(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); vertex1.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); return checkpointCoordinator; } /** Tests that the savepoints can be triggered concurrently. */ @Test public void testConcurrentSavepoints() throws Exception { int numSavepoints = 5; JobVertexID jobVertexID1 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); StandaloneCheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints( 1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCheckpointIDCounter(checkpointIDCounter) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); List<CompletableFuture<CompletedCheckpoint>> savepointFutures = new ArrayList<>(); String savepointDir = tmpFolder.newFolder().getAbsolutePath(); for (int i = 0; i < numSavepoints; i++) { savepointFutures.add( checkpointCoordinator.triggerSavepoint( savepointDir, SavepointFormatType.CANONICAL)); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : 
savepointFutures) {
            // None of the queued savepoints may complete before any acknowledgement arrives.
            assertFalse(savepointFuture.isDone());
        }
        manuallyTriggeredScheduledExecutor.triggerAll();
        long checkpointId = checkpointIDCounter.getLast();
        // Acknowledge every pending savepoint, newest checkpoint id first.
        for (int i = 0; i < numSavepoints; i++, checkpointId--) {
            checkpointCoordinator.receiveAcknowledgeMessage(
                    new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId),
                    TASK_MANAGER_LOCATION_INFO);
        }
        // After full acknowledgement every savepoint future must hold a completed checkpoint.
        for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) {
            assertNotNull(savepointFuture.get());
        }
    }

    /** Tests that no minimum delay between savepoints is enforced. */
    @Test
    public void testMinDelayBetweenSavepoints() throws Exception {
        // Huge min-pause: if savepoints honored it, the second trigger could not be issued.
        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setMinPauseBetweenCheckpoints(100000000L)
                        .setMaxConcurrentCheckpoints(1)
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build(EXECUTOR_RESOURCE.getExecutor());

        String savepointDir = tmpFolder.newFolder().getAbsolutePath();

        // "Did not trigger" here means the future is still pending, i.e. the savepoint
        // request was accepted; both requests must be accepted despite the min-pause.
        CompletableFuture<CompletedCheckpoint> savepoint0 =
                checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL);
        assertFalse("Did not trigger savepoint", savepoint0.isDone());

        CompletableFuture<CompletedCheckpoint> savepoint1 =
                checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL);
        assertFalse("Did not trigger savepoint", savepoint1.isDone());
    }

    /** Tests that the externalized checkpoint configuration is respected. */
    @Test
    public void testExternalizedCheckpoints() throws Exception {
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(new JobVertexID())
                        .build(EXECUTOR_RESOURCE.getExecutor());

        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setCheckpointRetentionPolicy(CheckpointRetentionPolicy.RETAIN_ON_FAILURE)
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build(graph);

        CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        // Every pending checkpoint must carry the configured RETAIN_ON_FAILURE properties.
        for (PendingCheckpoint checkpoint :
                checkpointCoordinator.getPendingCheckpoints().values()) {
            CheckpointProperties props = checkpoint.getProps();
            CheckpointProperties expected =
                    CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.RETAIN_ON_FAILURE);
            assertEquals(expected, props);
        }

        checkpointCoordinator.shutdown();
    }

    // Exercises key-group partitioning for fixed corner cases plus randomized
    // (maxParallelism, parallelism) pairs; the private overload below does the checking.
    @Test
    public void testCreateKeyGroupPartitions() {
        testCreateKeyGroupPartitions(1, 1);
        testCreateKeyGroupPartitions(13, 1);
        testCreateKeyGroupPartitions(13, 2);
        testCreateKeyGroupPartitions(Short.MAX_VALUE, 1);
        testCreateKeyGroupPartitions(Short.MAX_VALUE, 13);
        testCreateKeyGroupPartitions(Short.MAX_VALUE, Short.MAX_VALUE);

        // Fixed seed keeps the randomized sweep deterministic across runs.
        Random r = new Random(1234);
        for (int k = 0; k < 1000; ++k) {
            int maxParallelism = 1 + r.nextInt(Short.MAX_VALUE - 1);
            int parallelism = 1 + r.nextInt(maxParallelism);
            testCreateKeyGroupPartitions(maxParallelism, parallelism);
        }
    }

    // Verifies that each key group is contained in the range computed for its operator index.
    private void testCreateKeyGroupPartitions(int maxParallelism, int parallelism) {
        List<KeyGroupRange> ranges =
                StateAssignmentOperation.createKeyGroupPartitions(maxParallelism, parallelism);
        for (int i = 0; i < maxParallelism; ++i) {
            KeyGroupRange range
= ranges.get( KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup( maxParallelism, parallelism, i)); if (!range.contains(i)) { Assert.fail("Could not find expected key-group " + i + " in range " + range); } } } @Test public void testPartitionableStateRepartitioning() { Random r = new Random(42); for (int run = 0; run < 10000; ++run) { int oldParallelism = 1 + r.nextInt(9); int newParallelism = 1 + r.nextInt(9); int numNamedStates = 1 + r.nextInt(9); int maxPartitionsPerState = 1 + r.nextInt(9); doTestPartitionableStateRepartitioning( r, oldParallelism, newParallelism, numNamedStates, maxPartitionsPerState); } } private void doTestPartitionableStateRepartitioning( Random r, int oldParallelism, int newParallelism, int numNamedStates, int maxPartitionsPerState) { List<List<OperatorStateHandle>> previousParallelOpInstanceStates = new ArrayList<>(oldParallelism); for (int i = 0; i < oldParallelism; ++i) { Path fakePath = new Path("/fake-" + i); Map<String, OperatorStateHandle.StateMetaInfo> namedStatesToOffsets = new HashMap<>(); int off = 0; for (int s = 0; s < numNamedStates - 1; ++s) { long[] offs = new long[1 + r.nextInt(maxPartitionsPerState)]; for (int o = 0; o < offs.length; ++o) { offs[o] = off; ++off; } OperatorStateHandle.Mode mode = r.nextInt(10) == 0 ? 
OperatorStateHandle.Mode.UNION : OperatorStateHandle.Mode.SPLIT_DISTRIBUTE; namedStatesToOffsets.put( "State-" + s, new OperatorStateHandle.StateMetaInfo(offs, mode)); } if (numNamedStates % 2 == 0) { long[] offs = {off + 1, off + 2, off + 3, off + 4}; namedStatesToOffsets.put( "State-" + (numNamedStates - 1), new OperatorStateHandle.StateMetaInfo( offs, OperatorStateHandle.Mode.BROADCAST)); } previousParallelOpInstanceStates.add( Collections.singletonList( new OperatorStreamStateHandle( namedStatesToOffsets, new FileStateHandle(fakePath, -1)))); } Map<StreamStateHandle, Map<String, List<Long>>> expected = new HashMap<>(); int taskIndex = 0; int expectedTotalPartitions = 0; for (List<OperatorStateHandle> previousParallelOpInstanceState : previousParallelOpInstanceStates) { Assert.assertEquals(1, previousParallelOpInstanceState.size()); for (OperatorStateHandle psh : previousParallelOpInstanceState) { Map<String, OperatorStateHandle.StateMetaInfo> offsMap = psh.getStateNameToPartitionOffsets(); Map<String, List<Long>> offsMapWithList = new HashMap<>(offsMap.size()); for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> e : offsMap.entrySet()) { long[] offs = e.getValue().getOffsets(); int replication; switch (e.getValue().getDistributionMode()) { case UNION: replication = newParallelism; break; case BROADCAST: int extra = taskIndex < (newParallelism % oldParallelism) ? 
1 : 0; replication = newParallelism / oldParallelism + extra; break; case SPLIT_DISTRIBUTE: replication = 1; break; default: throw new RuntimeException( "Unknown distribution mode " + e.getValue().getDistributionMode()); } if (replication > 0) { expectedTotalPartitions += replication * offs.length; List<Long> offsList = new ArrayList<>(offs.length); for (long off : offs) { for (int p = 0; p < replication; ++p) { offsList.add(off); } } offsMapWithList.put(e.getKey(), offsList); } } if (!offsMapWithList.isEmpty()) { expected.put(psh.getDelegateStateHandle(), offsMapWithList); } taskIndex++; } } OperatorStateRepartitioner<OperatorStateHandle> repartitioner = RoundRobinOperatorStateRepartitioner.INSTANCE; List<List<OperatorStateHandle>> pshs = repartitioner.repartitionState( previousParallelOpInstanceStates, oldParallelism, newParallelism); Map<StreamStateHandle, Map<String, List<Long>>> actual = new HashMap<>(); int minCount = Integer.MAX_VALUE; int maxCount = 0; int actualTotalPartitions = 0; for (int p = 0; p < newParallelism; ++p) { int partitionCount = 0; Collection<OperatorStateHandle> pshc = pshs.get(p); for (OperatorStateHandle sh : pshc) { for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> namedState : sh.getStateNameToPartitionOffsets().entrySet()) { Map<String, List<Long>> stateToOffsets = actual.computeIfAbsent( sh.getDelegateStateHandle(), k -> new HashMap<>()); List<Long> actualOffs = stateToOffsets.computeIfAbsent( namedState.getKey(), k -> new ArrayList<>()); long[] add = namedState.getValue().getOffsets(); for (long l : add) { actualOffs.add(l); } partitionCount += namedState.getValue().getOffsets().length; } } minCount = Math.min(minCount, partitionCount); maxCount = Math.max(maxCount, partitionCount); actualTotalPartitions += partitionCount; } for (Map<String, List<Long>> v : actual.values()) { for (List<Long> l : v.values()) { Collections.sort(l); } } if (oldParallelism != newParallelism) { int maxLoadDiff = maxCount - minCount; 
Assert.assertTrue(
                    "Difference in partition load is > 1 : " + maxLoadDiff, maxLoadDiff <= 1);
        }

        Assert.assertEquals(expectedTotalPartitions, actualTotalPartitions);
        Assert.assertEquals(expected, actual);
    }

    /** Tests that the pending checkpoint stats callbacks are created. */
    @Test
    public void testCheckpointStatsTrackerPendingCheckpointCallback() throws Exception {
        CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class);
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setCheckpointStatsTracker(tracker)
                        .build(EXECUTOR_RESOURCE.getExecutor());

        when(tracker.reportPendingCheckpoint(
                        anyLong(), anyLong(), any(CheckpointProperties.class), any(Map.class)))
                .thenReturn(mock(PendingCheckpointStats.class));

        CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        // The tracker must be told about exactly one pending checkpoint (id 1) with the
        // default NEVER_RETAIN_AFTER_TERMINATION properties.
        verify(tracker, times(1))
                .reportPendingCheckpoint(
                        eq(1L),
                        any(Long.class),
                        eq(
                                CheckpointProperties.forCheckpoint(
                                        CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION)),
                        any());
    }

    /** Tests that the restore callbacks are called if registered. */
    @Test
    public void testCheckpointStatsTrackerRestoreCallback() throws Exception {
        StandaloneCompletedCheckpointStore store = new StandaloneCompletedCheckpointStore(1);

        CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class);
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCompletedCheckpointStore(store)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setCheckpointStatsTracker(tracker)
                        .build(EXECUTOR_RESOURCE.getExecutor());

        // Seed the store with an (empty-state) completed checkpoint to restore from.
        store.addCheckpointAndSubsumeOldestOne(
                new CompletedCheckpoint(
                        new JobID(),
                        0,
                        0,
                        0,
                        Collections.<OperatorID, OperatorState>emptyMap(),
                        Collections.<MasterState>emptyList(),
                        CheckpointProperties.forCheckpoint(
                                CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                        new TestCompletedCheckpointStorageLocation(),
                        null),
                new CheckpointsCleaner(),
                () -> {});

        assertTrue(
                checkpointCoordinator.restoreLatestCheckpointedStateToAll(
                        Collections.emptySet(), true));

        verify(tracker, times(1)).reportRestoredCheckpoint(any(RestoredCheckpointStats.class));
    }

    // Verifies shared-state registration/discard behavior across restore modes;
    // the remainder of this method continues past this block.
    @Test
    public void testSharedStateRegistrationOnRestore() throws Exception {
        for (RestoreMode restoreMode : RestoreMode.values()) {
            JobVertexID jobVertexID1 = new JobVertexID();

            int parallelism1 = 2;
            int maxParallelism1 = 4;

            ExecutionGraph graph =
                    new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                            .addJobVertex(jobVertexID1, parallelism1, maxParallelism1)
                            .build(EXECUTOR_RESOURCE.getExecutor());

            ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1);

            List<CompletedCheckpoint> checkpoints = Collections.emptyList();
            SharedStateRegistry firstInstance =
                    SharedStateRegistry.DEFAULT_FACTORY.create(
                            org.apache.flink.util.concurrent.Executors.directExecutor(),
                            checkpoints,
                            restoreMode);
            final EmbeddedCompletedCheckpointStore store =
                    new EmbeddedCompletedCheckpointStore(10, checkpoints, firstInstance);
            final CheckpointCoordinatorBuilder coordinatorBuilder =
                    new
CheckpointCoordinatorBuilder().setTimer(manuallyTriggeredScheduledExecutor); final CheckpointCoordinator coordinator = coordinatorBuilder.setCompletedCheckpointStore(store).build(graph); final int numCheckpoints = 3; List<KeyGroupRange> keyGroupPartitions1 = StateAssignmentOperation.createKeyGroupPartitions( maxParallelism1, parallelism1); for (int i = 0; i < numCheckpoints; ++i) { performIncrementalCheckpoint( graph.getJobID(), coordinator, jobVertex1, keyGroupPartitions1, i); } List<CompletedCheckpoint> completedCheckpoints = coordinator.getSuccessfulCheckpoints(); assertEquals(numCheckpoints, completedCheckpoints.size()); int sharedHandleCount = 0; List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint = new ArrayList<>(numCheckpoints); for (int i = 0; i < numCheckpoints; ++i) { sharedHandlesByCheckpoint.add(new HashMap<>(2)); } int cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { verify(keyedStateHandle, times(1)) .registerSharedStates( firstInstance, completedCheckpoint.getCheckpointID()); IncrementalRemoteKeyedStateHandle incrementalKeyedStateHandle = (IncrementalRemoteKeyedStateHandle) keyedStateHandle; sharedHandlesByCheckpoint .get(cp) .putAll(incrementalKeyedStateHandle.getSharedState()); for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getSharedState().values()) { assertFalse( streamStateHandle instanceof PlaceholderStreamStateHandle); verify(streamStateHandle, never()).discardState(); ++sharedHandleCount; } for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getPrivateState().values()) { verify(streamStateHandle, never()).discardState(); } verify(incrementalKeyedStateHandle.getMetaStateHandle(), never()) .discardState(); } verify(subtaskState, 
never()).discardState(); } } ++cp; } assertEquals(10, sharedHandleCount); store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (StreamStateHandle streamStateHandle : cpList.values()) { verify(streamStateHandle, never()).discardState(); } } store.shutdown(JobStatus.SUSPENDED, new CheckpointsCleaner()); Set<ExecutionJobVertex> tasks = new HashSet<>(); tasks.add(jobVertex1); assertEquals(JobStatus.SUSPENDED, store.getShutdownStatus().orElse(null)); SharedStateRegistry secondInstance = SharedStateRegistry.DEFAULT_FACTORY.create( org.apache.flink.util.concurrent.Executors.directExecutor(), store.getAllCheckpoints(), restoreMode); final EmbeddedCompletedCheckpointStore secondStore = new EmbeddedCompletedCheckpointStore( 10, store.getAllCheckpoints(), secondInstance); final CheckpointCoordinator secondCoordinator = coordinatorBuilder.setCompletedCheckpointStore(secondStore).build(graph); assertTrue(secondCoordinator.restoreLatestCheckpointedStateToAll(tasks, false)); cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { VerificationMode verificationMode; if (cp > 0) { verificationMode = times(1); } else { verificationMode = never(); } verify(keyedStateHandle, verificationMode) .registerSharedStates( secondInstance, completedCheckpoint.getCheckpointID()); } } } ++cp; } secondStore.removeOldestCheckpoint(); verifyDiscard( sharedHandlesByCheckpoint, cpId -> restoreMode == RestoreMode.CLAIM && cpId == 0 ? times(1) : never()); secondStore.removeOldestCheckpoint(); verifyDiscard(sharedHandlesByCheckpoint, cpId -> cpId == 1 ? 
never() : atLeast(0)); } } @Test public void jobFailsIfInFlightSynchronousSavepointIsDiscarded() throws Exception { final Tuple2<Integer, Throwable> invocationCounterAndException = Tuple2.of(0, null); final Throwable expectedRootCause = new IOException("Custom-Exception"); JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); final CheckpointCoordinator coordinator = getCheckpointCoordinator( graph, new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { invocationCounterAndException.f0 += 1; invocationCounterAndException.f1 = cause; } @Override public void failJobDueToTaskFailure( Throwable cause, ExecutionAttemptID failingTask) { throw new AssertionError( "This method should not be called for the test."); } })); final CompletableFuture<CompletedCheckpoint> savepointFuture = coordinator.triggerSynchronousSavepoint( false, "test-dir", SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); final PendingCheckpoint syncSavepoint = declineSynchronousSavepoint( graph.getJobID(), coordinator, attemptID1, expectedRootCause); assertTrue(syncSavepoint.isDisposed()); String expectedRootCauseMessage = String.format( "%s: %s", expectedRootCause.getClass().getName(), expectedRootCause.getMessage()); try { savepointFuture.get(); fail("Expected Exception not found."); } catch (ExecutionException e) { final Throwable cause = 
ExceptionUtils.stripExecutionException(e);
            // The savepoint future must fail with a CheckpointException whose root cause
            // carries the artificially injected exception message.
            assertTrue(cause instanceof CheckpointException);
            assertEquals(expectedRootCauseMessage, cause.getCause().getCause().getMessage());
        }

        // The fail-job callback must have fired exactly once with the same root cause.
        assertEquals(1L, invocationCounterAndException.f0.intValue());
        assertTrue(
                invocationCounterAndException.f1 instanceof CheckpointException
                        && invocationCounterAndException
                                .f1
                                .getCause()
                                .getCause()
                                .getMessage()
                                .equals(expectedRootCauseMessage));

        coordinator.shutdown();
    }

    /**
     * Tests that no checkpoint is triggered when the coordinator is stopped after the eager
     * pre-check.
     */
    @Test
    public void testTriggerCheckpointAfterStopping() throws Exception {
        StoppingCheckpointIDCounter testingCounter = new StoppingCheckpointIDCounter();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointIDCounter(testingCounter)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        testingCounter.setOwner(checkpointCoordinator);

        testTriggerCheckpoint(checkpointCoordinator, PERIODIC_SCHEDULER_SHUTDOWN);
    }

    /**
     * Tests that no checkpoint is triggered when the CheckpointIDCounter throws an {@code
     * IOException}.
     */
    @Test
    public void testTriggerCheckpointWithCounterIOException() throws Exception {
        IOExceptionCheckpointIDCounter testingCounter = new IOExceptionCheckpointIDCounter();
        TestFailJobCallback failureCallback = new TestFailJobCallback();

        CheckpointStatsTracker statsTracker =
                new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup());
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointIDCounter(testingCounter)
                        .setFailureManager(new CheckpointFailureManager(0, failureCallback))
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .setCheckpointStatsTracker(statsTracker)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        testingCounter.setOwner(checkpointCoordinator);

        testTriggerCheckpoint(checkpointCoordinator, IO_EXCEPTION);

        assertEquals(1, failureCallback.getInvokeCounter());

        // The IOException-ed attempt must be counted as one total / one failed checkpoint,
        // with nothing in-progress, completed, or restored, and no pending stats for id 1.
        CheckpointStatsCounts counts = statsTracker.createSnapshot().getCounts();
        assertEquals(0, counts.getNumberOfRestoredCheckpoints());
        assertEquals(1, counts.getTotalNumberOfCheckpoints());
        assertEquals(0, counts.getNumberOfInProgressCheckpoints());
        assertEquals(0, counts.getNumberOfCompletedCheckpoints());
        assertEquals(1, counts.getNumberOfFailedCheckpoints());
        assertNull(statsTracker.getPendingCheckpointStats(1));
    }

    /**
     * Triggers a checkpoint on the given coordinator and asserts that it fails with the expected
     * {@link CheckpointFailureReason}; any other failure is rethrown.
     */
    private void testTriggerCheckpoint(
            CheckpointCoordinator checkpointCoordinator,
            CheckpointFailureReason expectedFailureReason)
            throws Exception {
        try {
            checkpointCoordinator.startCheckpointScheduler();
            final CompletableFuture<CompletedCheckpoint> onCompletionPromise =
                    checkpointCoordinator.triggerCheckpoint(
                            CheckpointProperties.forCheckpoint(
                                    CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                            null,
                            true);
            manuallyTriggeredScheduledExecutor.triggerAll();
            try {
                onCompletionPromise.get();
                fail("should not trigger periodic checkpoint");
            } catch (ExecutionException e) {
                final Optional<CheckpointException> checkpointExceptionOptional =
                        ExceptionUtils.findThrowable(e, CheckpointException.class);
                if
(!checkpointExceptionOptional.isPresent() || checkpointExceptionOptional.get().getCheckpointFailureReason() != expectedFailureReason) { throw e; } } } finally { checkpointCoordinator.shutdown(); } } @Test public void testSavepointScheduledInUnalignedMode() throws Exception { int maxConcurrentCheckpoints = 1; int checkpointRequestsToSend = 10; int activeRequests = 0; ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setUnalignedCheckpointsEnabled(true) .setMaxConcurrentCheckpoints(maxConcurrentCheckpoints) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); try { List<Future<?>> checkpointFutures = new ArrayList<>(checkpointRequestsToSend); coordinator.startCheckpointScheduler(); while (activeRequests < checkpointRequestsToSend) { checkpointFutures.add(coordinator.triggerCheckpoint(true)); activeRequests++; } manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals( activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); Future<?> savepointFuture = coordinator.triggerSavepoint("/tmp", SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals( ++activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); coordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), createExecutionAttemptId(), 1L, new CheckpointException(CHECKPOINT_DECLINED)), "none"); manuallyTriggeredScheduledExecutor.triggerAll(); activeRequests--; assertEquals( activeRequests - maxConcurrentCheckpoints, coordinator.getNumQueuedRequests()); assertEquals(1, checkpointFutures.stream().filter(Future::isDone).count()); assertFalse(savepointFuture.isDone()); assertEquals(maxConcurrentCheckpoints, 
coordinator.getNumberOfPendingCheckpoints()); CheckpointProperties props = coordinator.getPendingCheckpoints().values().iterator().next().getProps(); assertTrue(props.isSavepoint()); assertFalse(props.forceCheckpoint()); } finally { coordinator.shutdown(); } } /** * Test that the checkpoint still behave correctly when the task checkpoint is triggered by the * master hooks and finished before the master checkpoint. Also make sure that the operator * coordinators are checkpointed before starting the task checkpoint. */ @Test public void testExternallyInducedSourceWithOperatorCoordinator() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(); OperatorSubtaskState subtaskState1 = OperatorSubtaskState.builder().build(); OperatorSubtaskState subtaskState2 = OperatorSubtaskState.builder().build(); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); 
taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID2, subtaskState2); AtomicBoolean coordCheckpointDone = new AtomicBoolean(false); OperatorCoordinatorCheckpointContext coordinatorCheckpointContext = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOnCallingCheckpointCoordinator( (checkpointId, result) -> { coordCheckpointDone.set(true); result.complete(new byte[0]); }) .setOperatorID(opID1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint( Collections.singleton(coordinatorCheckpointContext)) .build(graph); AtomicReference<Long> checkpointIdRef = new AtomicReference<>(); checkpointCoordinator.addMasterHook( new MasterTriggerRestoreHook<Integer>() { @Override public String getIdentifier() { return "anything"; } @Override @Nullable public CompletableFuture<Integer> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) throws Exception { assertTrue( "The coordinator checkpoint should have finished.", coordCheckpointDone.get()); checkpointIdRef.set(checkpointId); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); return null; } @Override public void restoreCheckpoint(long checkpointId, Integer checkpointData) {} @Override public 
SimpleVersionedSerializer<Integer> createCheckpointDataSerializer() { return new SimpleVersionedSerializer<Integer>() { @Override public int getVersion() { return 0; } @Override public byte[] serialize(Integer obj) { return new byte[0]; } @Override public Integer deserialize(int version, byte[] serialized) { return 1; } }; } }); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size()); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertEquals(1, checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()); assertEquals(0, checkpointCoordinator.getNumberOfPendingCheckpoints()); assertEquals(0, manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size()); long checkpointId = checkpointIdRef.get(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertEquals(checkpointId, gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId); } CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0); assertEquals(graph.getJobID(), success.getJobId()); assertEquals(2, success.getOperatorStates().size()); checkpointCoordinator.shutdown(); } @Test public void testCompleteCheckpointFailureWithExternallyInducedSource() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = 
graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(); OperatorSubtaskState subtaskState1 = OperatorSubtaskState.builder().build(); OperatorSubtaskState subtaskState2 = OperatorSubtaskState.builder().build(); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); taskOperatorSubtaskStates2.putSubtaskStateByOperatorID(opID2, subtaskState2); AtomicBoolean coordCheckpointDone = new AtomicBoolean(false); OperatorCoordinatorCheckpointContext coordinatorCheckpointContext = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOnCallingCheckpointCoordinator( (checkpointId, result) -> { coordCheckpointDone.set(true); result.complete(new byte[0]); }) .setOperatorID(opID1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint( Collections.singleton(coordinatorCheckpointContext)) .setCheckpointStorage( new JobManagerCheckpointStorage() { private static final long serialVersionUID = 8134582566514272546L; @Override public CheckpointStorageAccess createCheckpointStorage( JobID jobId) throws IOException { return new MemoryBackendCheckpointStorageAccess( jobId, null, null, 100) { @Override public 
CheckpointStorageLocation initializeLocationForCheckpoint( long checkpointId) throws IOException { return new NonPersistentMetadataCheckpointStorageLocation( 1000) { @Override public CheckpointMetadataOutputStream createMetadataOutputStream() throws IOException { throw new IOException( "Artificial Exception"); } }; } }; } }) .build(graph); AtomicReference<Long> checkpointIdRef = new AtomicReference<>(); checkpointCoordinator.addMasterHook( new MasterTriggerRestoreHook<Integer>() { @Override public String getIdentifier() { return "anything"; } @Override @Nullable public CompletableFuture<Integer> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) throws Exception { assertTrue( "The coordinator checkpoint should have finished.", coordCheckpointDone.get()); checkpointIdRef.set(checkpointId); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); return null; } @Override public void restoreCheckpoint(long checkpointId, Integer checkpointData) throws Exception {} @Override public SimpleVersionedSerializer<Integer> createCheckpointDataSerializer() { return new SimpleVersionedSerializer<Integer>() { @Override public int getVersion() { return 0; } @Override public byte[] serialize(Integer obj) { return new byte[0]; } @Override public Integer deserialize(int version, byte[] serialized) { return 1; } }; } }); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); 
assertTrue(checkpointFuture.isCompletedExceptionally());
assertTrue(checkpointCoordinator.getSuccessfulCheckpoints().isEmpty());
}

/**
 * Verifies that a registered master hook's {@code reset()} is invoked when the coordinator
 * restores latest checkpointed state to a (possibly empty) subset of subtasks, i.e. during
 * region recovery.
 */
@Test
public void testResetCalledInRegionRecovery() throws Exception {
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    TestResetHook hook = new TestResetHook("id");
    checkpointCoordinator.addMasterHook(hook);
    assertFalse(hook.resetCalled);
    // Even an empty subtask set must reset registered master hooks.
    checkpointCoordinator.restoreLatestCheckpointedStateToSubtasks(Collections.emptySet());
    assertTrue(hook.resetCalled);
}

/**
 * Verifies that when a later checkpoint completes, the operator coordinator is notified of
 * the completion of that checkpoint and of the abortion (subsumption) of the earlier
 * pending one.
 */
@Test
public void testNotifyCheckpointAbortionInOperatorCoordinator() throws Exception {
    JobVertexID jobVertexID = new JobVertexID();
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID)
                    .build(EXECUTOR_RESOURCE.getExecutor());
    ExecutionVertex executionVertex = graph.getJobVertex(jobVertexID).getTaskVertices()[0];
    ExecutionAttemptID attemptID = executionVertex.getCurrentExecutionAttempt().getAttemptId();
    // Coordinator context whose checkpoint future completes immediately with empty state.
    CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context =
            new CheckpointCoordinatorTestingUtils
                            .MockOperatorCheckpointCoordinatorContextBuilder()
                    .setOperatorID(new OperatorID())
                    .setOnCallingCheckpointCoordinator(
                            (ignored, future) -> future.complete(new byte[0]))
                    .build();
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointCoordinatorConfiguration(
                            CheckpointCoordinatorConfiguration.builder()
                                    // Allow both checkpoints to be pending at once.
                                    .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                    .build())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .setCoordinatorsToCheckpoint(Collections.singleton(context))
                    .build(graph);
    try {
        // Trigger the first checkpoint and record its id.
        checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        long checkpointId1 =
                Collections.max(checkpointCoordinator.getPendingCheckpoints().keySet());
        // Trigger a second checkpoint.
        checkpointCoordinator.triggerCheckpoint(false);
manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId2 = Collections.max(checkpointCoordinator.getPendingCheckpoints().keySet()); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID, checkpointId2, new CheckpointMetrics(), null); checkpointCoordinator.receiveAcknowledgeMessage(acknowledgeCheckpoint1, ""); assertEquals(Collections.singletonList(checkpointId1), context.getAbortedCheckpoints()); assertEquals( Collections.singletonList(checkpointId2), context.getCompletedCheckpoints()); } finally { checkpointCoordinator.shutdown(); } } @Test public void testTimeoutWhileCheckpointOperatorCoordinatorNotFinishing() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOperatorID(new OperatorID()) .setOnCallingCheckpointCoordinator( (ignored, future) -> { }) .build(); ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointTimeout(10) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint(Collections.singleton(context)) .build(graph); try { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); Assert.assertTrue(checkpointCoordinator.isTriggering()); manuallyTriggeredScheduledExecutor.triggerNonPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); Assert.assertFalse(checkpointCoordinator.isTriggering()); } finally { checkpointCoordinator.shutdown(); 
executorService.shutdownNow(); } } @Test public void testAbortingBeforeTriggeringCheckpointOperatorCoordinator() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); String trigger = "Trigger"; String abort = "Abort"; final List<String> notificationSequence = new ArrayList<>(); CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOperatorID(new OperatorID()) .setOnCallingCheckpointCoordinator( (id, future) -> { notificationSequence.add(trigger + id); future.complete(new byte[0]); }) .setOnCallingAbortCurrentTriggering(() -> notificationSequence.add(abort)) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointTimeout(10) .build()) .setIoExecutor(manuallyTriggeredScheduledExecutor) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint(Collections.singleton(context)) .build(graph); try { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.trigger(); manuallyTriggeredScheduledExecutor.trigger(); manuallyTriggeredScheduledExecutor.trigger(); declineCheckpoint(1L, checkpointCoordinator, jobVertexID, graph); manuallyTriggeredScheduledExecutor.triggerAll(); checkState(!checkpointCoordinator.isTriggering()); checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); Assert.assertTrue( !notificationSequence.contains(trigger + "1") || notificationSequence.indexOf(trigger + "1") < notificationSequence.indexOf(abort)); } finally { checkpointCoordinator.shutdown(); } } @Test public void testReportLatestCompletedCheckpointIdWithAbort() throws Exception { 
JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex task = graph.getJobVertex(jobVertexID).getTaskVertices()[0]; AtomicLong reportedCheckpointId = new AtomicLong(-1); LogicalSlot slot = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public void notifyCheckpointAborted( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long latestCompletedCheckpointId, long timestamp) { reportedCheckpointId.set(latestCompletedCheckpointId); } }) .createTestingLogicalSlot(); ExecutionGraphTestUtils.setVertexResource(task, slot); task.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setAllowCheckpointsAfterTasksFinished(true) .build(graph); CompletableFuture<CompletedCheckpoint> result = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long completedCheckpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), task.getCurrentExecutionAttempt().getAttemptId(), completedCheckpointId, new CheckpointMetrics(), new TaskStateSnapshot()), "localhost"); assertTrue(result.isDone()); assertFalse(result.isCompletedExceptionally()); result = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long abortedCheckpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); checkpointCoordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), task.getCurrentExecutionAttempt().getAttemptId(), abortedCheckpointId, 
new CheckpointException(CHECKPOINT_EXPIRED)), "localhost"); assertTrue(result.isCompletedExceptionally()); assertEquals(completedCheckpointId, reportedCheckpointId.get()); } @Test public void testBaseLocationsNotInitialized() throws Exception { File checkpointDir = tmpFolder.newFolder(); JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); Path jobCheckpointPath = new Path(checkpointDir.getAbsolutePath(), graph.getJobID().toString()); FileSystem fs = FileSystem.get(checkpointDir.toURI()); Assert.assertFalse(fs.exists(jobCheckpointPath)); } private CheckpointCoordinator getCheckpointCoordinator( ExecutionGraph graph, CheckpointFailureManager failureManager) throws Exception { return new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setFailureManager(failureManager) .build(graph); } private CheckpointCoordinator getCheckpointCoordinator(ScheduledExecutor timer) throws Exception { ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .addJobVertex(new JobVertexID()) .build(EXECUTOR_RESOURCE.getExecutor()); return new CheckpointCoordinatorBuilder().setTimer(timer).build(graph); } private CheckpointFailureManager getCheckpointFailureManager(String errorMsg) { return new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { throw new RuntimeException(errorMsg); } @Override public void failJobDueToTaskFailure( Throwable cause, ExecutionAttemptID failingTask) { throw new RuntimeException(errorMsg); } }); } private PendingCheckpoint declineSynchronousSavepoint( final JobID jobId, final CheckpointCoordinator coordinator, final ExecutionAttemptID attemptID, final Throwable reason) { final long checkpointId = 
coordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); final PendingCheckpoint checkpoint = coordinator.getPendingCheckpoints().get(checkpointId); coordinator.receiveDeclineMessage( new DeclineCheckpoint( jobId, attemptID, checkpointId, new CheckpointException(CHECKPOINT_DECLINED, reason)), TASK_MANAGER_LOCATION_INFO); return checkpoint; } private void performIncrementalCheckpoint( JobID jobId, CheckpointCoordinator checkpointCoordinator, ExecutionJobVertex jobVertex1, List<KeyGroupRange> keyGroupPartitions1, int cpSequenceNumber) throws Exception { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertEquals(1, checkpointCoordinator.getPendingCheckpoints().size()); long checkpointId = Iterables.getOnlyElement(checkpointCoordinator.getPendingCheckpoints().keySet()); for (int index = 0; index < jobVertex1.getParallelism(); index++) { KeyGroupRange keyGroupRange = keyGroupPartitions1.get(index); Map<StateHandleID, StreamStateHandle> privateState = new HashMap<>(); privateState.put( new StateHandleID("private-1"), spy(new ByteStreamStateHandle("private-1", new byte[] {'p'}))); Map<StateHandleID, StreamStateHandle> sharedState = new HashMap<>(); if (cpSequenceNumber > 0) { sharedState.put( new StateHandleID("shared-" + (cpSequenceNumber - 1)), spy(new PlaceholderStreamStateHandle(1L))); } sharedState.put( new StateHandleID("shared-" + cpSequenceNumber), spy( new ByteStreamStateHandle( "shared-" + cpSequenceNumber + "-" + keyGroupRange, new byte[] {'s'}))); IncrementalRemoteKeyedStateHandle managedState = spy( new IncrementalRemoteKeyedStateHandle( new UUID(42L, 42L), keyGroupRange, checkpointId, sharedState, privateState, spy(new ByteStreamStateHandle("meta", new byte[] {'m'})))); OperatorSubtaskState operatorSubtaskState = spy(OperatorSubtaskState.builder().setManagedKeyedState(managedState).build()); Map<OperatorID, OperatorSubtaskState> opStates = new HashMap<>(); opStates.put( 
jobVertex1.getOperatorIDs().get(0).getGeneratedOperatorID(), operatorSubtaskState); TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(opStates); AcknowledgeCheckpoint acknowledgeCheckpoint = new AcknowledgeCheckpoint( jobId, jobVertex1 .getTaskVertices()[index] .getCurrentExecutionAttempt() .getAttemptId(), checkpointId, new CheckpointMetrics(), taskStateSnapshot); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint, TASK_MANAGER_LOCATION_INFO); } } private static class IOExceptionCheckpointIDCounter extends CheckpointIDCounterWithOwner { @Override public long getAndIncrement() throws Exception { checkNotNull(owner); throw new IOException("disk is error!"); } } private static class IOExceptionCheckpointStorage extends JobManagerCheckpointStorage { @Override public CheckpointStorageAccess createCheckpointStorage(JobID jobId) throws IOException { return new MemoryBackendCheckpointStorageAccess(jobId, null, null, 100) { @Override public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId) throws IOException { throw new IOException("disk is error!"); } }; } } private static class StoppingCheckpointIDCounter extends CheckpointIDCounterWithOwner { @Override public long getAndIncrement() throws Exception { checkNotNull(owner); owner.stopCheckpointScheduler(); return super.getAndIncrement(); } } private static class CheckpointIDCounterWithOwner extends StandaloneCheckpointIDCounter { protected CheckpointCoordinator owner; void setOwner(CheckpointCoordinator coordinator) { this.owner = checkNotNull(coordinator); } } private static class TestFailJobCallback implements CheckpointFailureManager.FailJobCallback { private int invokeCounter = 0; @Override public void failJob(Throwable cause) { invokeCounter++; } @Override public void failJobDueToTaskFailure( final Throwable cause, final ExecutionAttemptID executionAttemptID) { invokeCounter++; } public int getInvokeCounter() { return invokeCounter; } } private static class 
TestResetHook implements MasterTriggerRestoreHook<String> { private final String id; boolean resetCalled; TestResetHook(String id) { this.id = id; this.resetCalled = false; } @Override public String getIdentifier() { return id; } @Override public void reset() throws Exception { resetCalled = true; } @Override public CompletableFuture<String> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) { throw new UnsupportedOperationException(); } @Override public void restoreCheckpoint(long checkpointId, @Nullable String checkpointData) throws Exception { throw new UnsupportedOperationException(); } @Override public SimpleVersionedSerializer<String> createCheckpointDataSerializer() { throw new UnsupportedOperationException(); } } private static void verifyDiscard( List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint1, Function<Integer, VerificationMode> checkpointVerify) throws Exception { for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint1) { for (Map.Entry<StateHandleID, StreamStateHandle> entry : cpList.entrySet()) { String key = entry.getKey().getKeyString(); int checkpointID = Integer.parseInt(String.valueOf(key.charAt(key.length() - 1))); VerificationMode verificationMode = checkpointVerify.apply(checkpointID); verify(entry.getValue(), verificationMode).discardState(); } } } private TestingStreamStateHandle handle() { return new TestingStreamStateHandle(); } private void declineCheckpoint( long checkpointId, CheckpointCoordinator coordinator, JobVertexID nackVertexID, ExecutionGraph graph) { coordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), graph.getJobVertex(nackVertexID) .getTaskVertices()[0] .getCurrentExecutionAttempt() .getAttemptId(), checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), "test"); } private void ackCheckpoint( long checkpointId, CheckpointCoordinator coordinator, JobVertexID ackVertexID, ExecutionGraph graph, TestingStreamStateHandle metaState, 
TestingStreamStateHandle privateState, TestingStreamStateHandle sharedState) throws CheckpointException { Map<StateHandleID, StreamStateHandle> sharedStateMap = new HashMap<>(singletonMap(new StateHandleID("shared-state-key"), sharedState)); Map<StateHandleID, StreamStateHandle> privateStateMap = new HashMap<>(singletonMap(new StateHandleID("private-state-key"), privateState)); ExecutionJobVertex jobVertex = graph.getJobVertex(ackVertexID); OperatorID operatorID = jobVertex.getOperatorIDs().get(0).getGeneratedOperatorID(); coordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), jobVertex.getTaskVertices()[0].getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointMetrics(), new TaskStateSnapshot( singletonMap( operatorID, OperatorSubtaskState.builder() .setManagedKeyedState( new IncrementalRemoteKeyedStateHandle( UUID.randomUUID(), KeyGroupRange.of(0, 9), checkpointId, sharedStateMap, privateStateMap, metaState)) .build()))), "test"); } }
/** Tests for the checkpoint coordinator. */
class CheckpointCoordinatorTest extends TestLogger {

    @RegisterExtension
    static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE =
            TestingUtils.defaultExecutorExtension();

    /**
     * Aborting a checkpoint must dispose its private and metadata state immediately, while
     * shared state must survive the abort and only be disposed after a later checkpoint
     * completes (subsuming it).
     */
    @Test
    void testSharedStateNotDiscaredOnAbort() throws Exception {
        JobVertexID v1 = new JobVertexID(), v2 = new JobVertexID();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(v1)
                        .addJobVertex(v2)
                        .build(EXECUTOR_RESOURCE.getExecutor());
        CheckpointCoordinator coordinator =
                new CheckpointCoordinatorBuilder()
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build(graph);
        coordinator.startCheckpointScheduler();

        // Checkpoint 1: v1 acknowledges with shared state, v2 declines -> checkpoint aborts.
        CompletableFuture<CompletedCheckpoint> cpFuture = coordinator.triggerCheckpoint(true);
        manuallyTriggeredScheduledExecutor.triggerAll();
        cpFuture.getNow(null);
        TestingStreamStateHandle metaState = handle();
        TestingStreamStateHandle privateState = handle();
        TestingStreamStateHandle sharedState = handle();
        ackCheckpoint(1L, coordinator, v1, graph, metaState, privateState, sharedState);
        declineCheckpoint(1L, coordinator, v2, graph);
        // Private and metadata handles are disposed on abort; the shared handle is not.
        assertThat(privateState.isDisposed()).isTrue();
        assertThat(metaState.isDisposed()).isTrue();
        assertThat(sharedState.isDisposed()).isFalse();

        // Checkpoint 2 completes; only now is the earlier shared state disposed.
        cpFuture = coordinator.triggerCheckpoint(true);
        manuallyTriggeredScheduledExecutor.triggerAll();
        cpFuture.getNow(null);
        ackCheckpoint(2L, coordinator, v1, graph, handle(), handle(), handle());
        ackCheckpoint(2L, coordinator, v2, graph, handle(), handle(), handle());
        cpFuture.get();
        assertThat(sharedState.isDisposed()).isTrue();
    }

    /** A stats report arriving after the checkpoint already failed must still be recorded. */
    @Test
    void testAbortedCheckpointStatsUpdatedAfterFailure() throws Exception {
        testReportStatsAfterFailure(
                1L,
                (coordinator, execution, metrics) -> {
                    coordinator.reportStats(1L, execution.getAttemptId(), metrics);
                    return null;
                });
    }

    /** A late acknowledge arriving after the checkpoint failed must still update the stats. */
    @Test
    void testCheckpointStatsUpdatedAfterFailure() throws Exception {
        testReportStatsAfterFailure(
                1L,
                (coordinator, execution, metrics) ->
                        coordinator.receiveAcknowledgeMessage(
                                new
AcknowledgeCheckpoint( execution.getVertex().getJobId(), execution.getAttemptId(), 1L, metrics, new TaskStateSnapshot()), TASK_MANAGER_LOCATION_INFO)); } private void testReportStatsAfterFailure( long checkpointId, TriFunctionWithException< CheckpointCoordinator, Execution, CheckpointMetrics, ?, CheckpointException> reportFn) throws Exception { JobVertexID decliningVertexID = new JobVertexID(); JobVertexID lateReportVertexID = new JobVertexID(); ExecutionGraph executionGraph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(decliningVertexID) .addJobVertex(lateReportVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex decliningVertex = executionGraph.getJobVertex(decliningVertexID).getTaskVertices()[0]; ExecutionVertex lateReportVertex = executionGraph.getJobVertex(lateReportVertexID).getTaskVertices()[0]; CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(statsTracker) .build(executionGraph); CompletableFuture<CompletedCheckpoint> result = coordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); checkState( coordinator.getNumberOfPendingCheckpoints() == 1, "wrong number of pending checkpoints: %s", coordinator.getNumberOfPendingCheckpoints()); if (result.isDone()) { result.get(); } coordinator.receiveDeclineMessage( new DeclineCheckpoint( executionGraph.getJobID(), decliningVertex.getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), "test"); CheckpointMetrics lateReportedMetrics = new CheckpointMetricsBuilder() .setTotalBytesPersisted(18) .setBytesPersistedOfThisCheckpoint(18) .setBytesProcessedDuringAlignment(19) .setAsyncDurationMillis(20) .setAlignmentDurationNanos(123 * 1_000_000) .setCheckpointStartDelayNanos(567 * 
1_000_000) .build(); reportFn.apply( coordinator, lateReportVertex.getCurrentExecutionAttempt(), lateReportedMetrics); assertStatsEqual( checkpointId, lateReportVertex.getJobvertexId(), 0, lateReportedMetrics, statsTracker.createSnapshot().getHistory().getCheckpointById(checkpointId)); } private boolean hasNoSubState(OperatorState s) { return s.getNumberCollectedStates() == 0; } private void assertStatsEqual( long checkpointId, JobVertexID jobVertexID, int subtasIdx, CheckpointMetrics expected, AbstractCheckpointStats actual) { assertThat(actual.getCheckpointId()).isEqualTo(checkpointId); assertThat(actual.getStatus()).isEqualTo(CheckpointStatsStatus.FAILED); assertThat(actual.getNumberOfAcknowledgedSubtasks()).isZero(); assertStatsMetrics(jobVertexID, subtasIdx, expected, actual); } public static void assertStatsMetrics( JobVertexID jobVertexID, int subtasIdx, CheckpointMetrics expected, AbstractCheckpointStats actual) { assertThat(actual.getStateSize()).isEqualTo(expected.getTotalBytesPersisted()); SubtaskStateStats taskStats = actual.getAllTaskStateStats().stream() .filter(s -> s.getJobVertexId().equals(jobVertexID)) .findAny() .get() .getSubtaskStats()[subtasIdx]; assertThat(taskStats.getAlignmentDuration()) .isEqualTo(expected.getAlignmentDurationNanos() / 1_000_000); assertThat(taskStats.getUnalignedCheckpoint()).isEqualTo(expected.getUnalignedCheckpoint()); assertThat(taskStats.getAsyncCheckpointDuration()) .isEqualTo(expected.getAsyncDurationMillis()); assertThat(taskStats.getAlignmentDuration()) .isEqualTo(expected.getAlignmentDurationNanos() / 1_000_000); assertThat(taskStats.getCheckpointStartDelay()) .isEqualTo(expected.getCheckpointStartDelayNanos() / 1_000_000); } private static final String TASK_MANAGER_LOCATION_INFO = "Unknown location"; private ManuallyTriggeredScheduledExecutor manuallyTriggeredScheduledExecutor; @TempDir private java.nio.file.Path tmpFolder; @BeforeEach void setUp() { manuallyTriggeredScheduledExecutor = new 
ManuallyTriggeredScheduledExecutor();
}

/**
 * Scheduling a trigger request after the coordinator has been shut down (and its timer
 * executor stopped) must not throw.
 */
@Test
void testScheduleTriggerRequestDuringShutdown() throws Exception {
    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    CheckpointCoordinator coordinator =
            getCheckpointCoordinator(new ScheduledExecutorServiceAdapter(executor));
    coordinator.shutdown();
    executor.shutdownNow();
    coordinator.scheduleTriggerRequest();
}

/**
 * Verifies that the configured minimum pause between checkpoints is honored: after one
 * checkpoint completes, no new checkpoint becomes pending until the pause has elapsed.
 */
@Test
void testMinCheckpointPause() throws Exception {
    // A real single-threaded timer is used here because the test relies on wall-clock waits.
    ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
    CheckpointCoordinator coordinator = null;
    try {
        int pause = 1000;
        JobVertexID jobVertexId = new JobVertexID();
        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexId)
                        .setMainThreadExecutor(
                                ComponentMainThreadExecutorServiceAdapter
                                        .forSingleThreadExecutor(
                                                new DirectScheduledExecutorService()))
                        .build(EXECUTOR_RESOURCE.getExecutor());
        ExecutionVertex vertex = graph.getJobVertex(jobVertexId).getTaskVertices()[0];
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        coordinator =
                new CheckpointCoordinatorBuilder()
                        .setTimer(new ScheduledExecutorServiceAdapter(executorService))
                        .setCheckpointCoordinatorConfiguration(
                                CheckpointCoordinatorConfiguration.builder()
                                        .setCheckpointInterval(pause)
                                        .setCheckpointTimeout(Long.MAX_VALUE)
                                        .setMaxConcurrentCheckpoints(1)
                                        .setMinPauseBetweenCheckpoints(pause)
                                        .build())
                        .build(graph);
        coordinator.startCheckpointScheduler();
        coordinator.triggerCheckpoint(true);
        // Second request is queued because max concurrent checkpoints is 1.
        coordinator.triggerCheckpoint(true);
        // Wait until the first checkpoint actually started (storage location initialized).
        while (coordinator.getPendingCheckpoints().values().stream()
                .noneMatch(pc -> pc.getCheckpointStorageLocation() != null)) {
            Thread.sleep(10);
        }
        coordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptId, 1L),
                TASK_MANAGER_LOCATION_INFO);
        // Within the minimum pause, no new checkpoint may become pending.
        Thread.sleep(pause / 2);
        assertThat(coordinator.getNumberOfPendingCheckpoints()).isZero();
        // After the pause, the queued checkpoint is eventually triggered.
        while (coordinator.getNumberOfPendingCheckpoints() == 0)
{ Thread.sleep(1); } } finally { if (coordinator != null) { coordinator.shutdown(); } executorService.shutdownNow(); } } @Test void testCheckpointAbortsIfTriggerTasksAreNotExecuted() throws Exception { ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .addJobVertex(new JobVertexID(), false) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointFuture).isCompletedExceptionally(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); checkpointCoordinator.shutdown(); } @Test void testCheckpointAbortsIfTriggerTasksAreFinished() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph); Arrays.stream(graph.getJobVertex(jobVertexID1).getTaskVertices()) .forEach(task -> task.getCurrentExecutionAttempt().markFinished()); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); 
manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointFuture).isCompletedExceptionally(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); checkpointCoordinator.shutdown(); } @Test void testCheckpointTriggeredAfterSomeTasksFinishedIfAllowed() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1, 3, 256) .addJobVertex(jobVertexID2, 3, 256) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); ExecutionJobVertex jobVertex2 = graph.getJobVertex(jobVertexID2); jobVertex1.getTaskVertices()[0].getCurrentExecutionAttempt().markFinished(); jobVertex1.getTaskVertices()[1].getCurrentExecutionAttempt().markFinished(); jobVertex2.getTaskVertices()[1].getCurrentExecutionAttempt().markFinished(); CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setAllowCheckpointsAfterTasksFinished(true) .setCheckpointStatsTracker(statsTracker) .build(graph); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointFuture.isDone()).isFalse(); assertThat(checkpointFuture.isCompletedExceptionally()).isFalse(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne(); PendingCheckpoint pendingCheckpoint = 
checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); AbstractCheckpointStats checkpointStats = statsTracker .createSnapshot() .getHistory() .getCheckpointById(pendingCheckpoint.getCheckpointID()); assertThat(checkpointStats.getNumberOfAcknowledgedSubtasks()).isEqualTo(3); for (ExecutionVertex task : Arrays.asList( jobVertex1.getTaskVertices()[0], jobVertex1.getTaskVertices()[1], jobVertex2.getTaskVertices()[1])) { assertThat( checkpointStats.getTaskStateStats(task.getJobvertexId()) .getSubtaskStats()[task.getParallelSubtaskIndex()]) .isNotNull(); } } @Test void testTasksFinishDuringTriggering() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .setTransitToRunning(false) .addJobVertex(jobVertexID1, 1, 256) .addJobVertex(jobVertexID2, 1, 256) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); ExecutionVertex taskVertex = jobVertex1.getTaskVertices()[0]; ExecutionJobVertex jobVertex2 = graph.getJobVertex(jobVertexID2); ExecutionVertex taskVertex2 = jobVertex2.getTaskVertices()[0]; AtomicBoolean checkpointAborted = new AtomicBoolean(false); LogicalSlot slot1 = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public CompletableFuture<Acknowledge> triggerCheckpoint( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long timestamp, CheckpointOptions checkpointOptions) { taskVertex.getCurrentExecutionAttempt().markFinished(); return FutureUtils.completedExceptionally( new RpcException("")); } }) .createTestingLogicalSlot(); LogicalSlot slot2 = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public void notifyCheckpointAborted( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long 
latestCompletedCheckpointId,
                                        long timestamp) {
                                    checkpointAborted.set(true);
                                }
                            })
                    .createTestingLogicalSlot();

    // Assign slots and move both tasks to RUNNING by hand (setTransitToRunning(false) above).
    ExecutionGraphTestUtils.setVertexResource(taskVertex, slot1);
    taskVertex.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);
    ExecutionGraphTestUtils.setVertexResource(taskVertex2, slot2);
    taskVertex2.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING);

    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .setAllowCheckpointsAfterTasksFinished(true)
                    .build(graph);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    // The failing trigger RPC aborts the checkpoint, and the abort reaches the second task.
    assertThat(checkpointFuture).isCompletedExceptionally();
    assertThat(checkpointAborted.get()).isTrue();
}

/**
 * Verifies that when a decline pushes the failure counter over the tolerance configured in the
 * {@link CheckpointFailureManager}, the manager's failure callback surfaces as an exception.
 */
@Test
void testTriggerAndDeclineCheckpointThenFailureManagerThrowsException() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    final ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    final ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    final String errorMsg = "Exceeded checkpoint failure tolerance number!";
    CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg);
    CheckpointCoordinator checkpointCoordinator =
            getCheckpointCoordinator(graph, checkpointFailureManager);

    try {
        final CompletableFuture<CompletedCheckpoint> checkPointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkPointFuture);

        long checkpointId =
                checkpointCoordinator
                        .getPendingCheckpoints()
                        .entrySet()
                        .iterator()
                        .next()
                        .getKey();
        PendingCheckpoint checkpoint =
                checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

        // One of the two tasks acknowledges; the checkpoint stays pending.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
                TASK_MANAGER_LOCATION_INFO);
        assertThat(checkpoint.isDisposed()).isFalse();
        assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();

        // The other task declines; the tolerance is 0, so the failure manager must throw.
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpointId,
                        new CheckpointException(CHECKPOINT_DECLINED)),
                TASK_MANAGER_LOCATION_INFO);
        fail("Test failed.");
    } catch (Exception e) {
        ExceptionUtils.assertThrowableWithMessage(e, errorMsg);
    } finally {
        checkpointCoordinator.shutdown();
    }
}

/**
 * Verifies that aborting pending checkpoints with an IO_EXCEPTION reason counts against the
 * tolerable failure number and surfaces the failure-manager error.
 */
@Test
void testIOExceptionCheckpointExceedsTolerableFailureNumber() throws Exception {
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(new JobVertexID())
                    .addJobVertex(new JobVertexID())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final String expectedErrorMessage = "Expected Error Message";
    CheckpointFailureManager checkpointFailureManager =
            getCheckpointFailureManager(expectedErrorMessage);
    CheckpointCoordinator checkpointCoordinator =
            getCheckpointCoordinator(graph, checkpointFailureManager);

    try {
        checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        checkpointCoordinator.abortPendingCheckpoints(new CheckpointException(IO_EXCEPTION));
        fail("Test failed.");
    } catch (Exception e) {
        ExceptionUtils.assertThrowableWithMessage(e, expectedErrorMessage);
    } finally {
        checkpointCoordinator.shutdown();
    }
}

@Test
void
testIOExceptionForPeriodicSchedulingWithInactiveTasks() throws Exception {
    CheckpointCoordinator checkpointCoordinator =
            setupCheckpointCoordinatorWithInactiveTasks(new IOExceptionCheckpointStorage());

    // Periodic trigger against a storage that throws an IOException.
    final CompletableFuture<CompletedCheckpoint> onCompletionPromise =
            checkpointCoordinator.triggerCheckpoint(
                    CheckpointProperties.forCheckpoint(
                            CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
                    null,
                    true);
    manuallyTriggeredScheduledExecutor.triggerAll();
    try {
        onCompletionPromise.get();
        fail("should not trigger periodic checkpoint after IOException occurred.");
    } catch (Exception e) {
        // Only accept a CheckpointException whose reason is IO_EXCEPTION; anything else
        // indicates an unrelated failure and is rethrown.
        final Optional<CheckpointException> checkpointExceptionOptional =
                ExceptionUtils.findThrowable(e, CheckpointException.class);
        if (!checkpointExceptionOptional.isPresent()
                || checkpointExceptionOptional.get().getCheckpointFailureReason()
                        != IO_EXCEPTION) {
            throw e;
        }
    }
}

/** Tests that a checkpoint is not triggered when the checkpoint storage throws an IOException. */
@Test
void testTriggerCheckpointAfterCheckpointStorageIOException() throws Exception {
    // given: Checkpoint coordinator which fails on checkpoint storage access.
    TestFailJobCallback failureCallback = new TestFailJobCallback();
    CheckpointStatsTracker statsTracker =
            new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup());
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointStatsTracker(statsTracker)
                    .setFailureManager(new CheckpointFailureManager(0, failureCallback))
                    .setCheckpointStorage(new IOExceptionCheckpointStorage())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    // when: the checkpoint fails with an IO_EXCEPTION reason.
    testTriggerCheckpoint(checkpointCoordinator, IO_EXCEPTION);

    // then: the job-failure callback fired once and a pending stats entry was still recorded.
    assertThat(failureCallback.getInvokeCounter()).isOne();
    assertThat(statsTracker.getPendingCheckpointStats(1)).isNotNull();
}

/**
 * Verifies that a checkpoint is aborted (future completes exceptionally) when the tasks that
 * would trigger it are already finished and the storage throws an IOException.
 */
@Test
void testCheckpointAbortsIfTriggerTasksAreFinishedAndIOException() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2, false)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointStorage(new IOExceptionCheckpointStorage())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(graph);

    // All trigger tasks of the first vertex are already finished.
    Arrays.stream(graph.getJobVertex(jobVertexID1).getTaskVertices())
            .forEach(task -> task.getCurrentExecutionAttempt().markFinished());

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    checkpointCoordinator.startCheckpointScheduler();
    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();

    assertThat(checkpointFuture).isCompletedExceptionally();
    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    checkpointCoordinator.shutdown();
}

/**
 * Verifies that expiring pending checkpoints counts against the tolerable failure number and
 * surfaces the failure-manager error.
 */
@Test
void testExpiredCheckpointExceedsTolerableFailureNumber() throws Exception {
    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(new JobVertexID())
                    .addJobVertex(new JobVertexID())
                    .build(EXECUTOR_RESOURCE.getExecutor());

    final String errorMsg = "Exceeded checkpoint failure tolerance number!";
    CheckpointFailureManager checkpointFailureManager = getCheckpointFailureManager(errorMsg);
    CheckpointCoordinator checkpointCoordinator =
            getCheckpointCoordinator(graph, checkpointFailureManager);

    try {
        checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        checkpointCoordinator.abortPendingCheckpoints(
                new CheckpointException(CHECKPOINT_EXPIRED));
        fail("Test failed.");
    } catch (Exception e) {
        ExceptionUtils.assertThrowableWithMessage(e, errorMsg);
    } finally {
        checkpointCoordinator.shutdown();
    }
}

@Test
void testTriggerAndDeclineSyncCheckpointFailureSimple() throws Exception {
    // Decline with a synchronous failure reason.
    testTriggerAndDeclineCheckpointSimple(CHECKPOINT_DECLINED);
}

@Test
void testTriggerAndDeclineAsyncCheckpointFailureSimple() throws Exception {
    // Decline with an asynchronous failure reason.
    testTriggerAndDeclineCheckpointSimple(CHECKPOINT_ASYNC_EXCEPTION);
}

/**
 * This test triggers a checkpoint and then sends a decline checkpoint message from one of the
 * tasks. The expected behaviour is that said checkpoint is discarded and a new checkpoint is
 * triggered.
 *
 * @param checkpointFailureReason the reason carried by the decline message
 */
private void testTriggerAndDeclineCheckpointSimple(
        CheckpointFailureReason checkpointFailureReason) throws Exception {
    final CheckpointException checkpointException =
            new CheckpointException(checkpointFailureReason);

    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    TestFailJobCallback failJobCallback = new TestFailJobCallback();
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointCoordinatorConfiguration(
                            CheckpointCoordinatorConfiguration.builder()
                                    .setAlignedCheckpointTimeout(Long.MAX_VALUE)
                                    .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                    .build())
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .setCheckpointFailureManager(
                            new
CheckpointFailureManager(0, failJobCallback))
                    .build(graph);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    // Trigger the first checkpoint; it should become pending.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

    // Validate that we have a pending checkpoint and an active cancellation timer.
    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).hasSize(1);

    long checkpointId =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    PendingCheckpoint checkpoint =
            checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

    assertThat(checkpoint).isNotNull();
    assertThat(checkpoint.getCheckpointID()).isEqualTo(checkpointId);
    assertThat(checkpoint.getJobId()).isEqualTo(graph.getJobID());
    assertThat(checkpoint.getNumberOfNonAcknowledgedTasks()).isEqualTo(2);
    assertThat(checkpoint.getNumberOfAcknowledgedTasks()).isZero();
    assertThat(checkpoint.getOperatorStates().size()).isZero();
    assertThat(checkpoint.isDisposed()).isFalse();
    assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();

    // Both tasks received exactly one trigger with the expected id/timestamp/options.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        CheckpointCoordinatorTestingUtils.TriggeredCheckpoint triggeredCheckpoint =
                gateway.getOnlyTriggeredCheckpoint(
                        vertex.getCurrentExecutionAttempt().getAttemptId());
        assertThat(triggeredCheckpoint.checkpointId).isEqualTo(checkpointId);
        assertThat(triggeredCheckpoint.timestamp)
                .isEqualTo(checkpoint.getCheckpointTimestamp());
        assertThat(triggeredCheckpoint.checkpointOptions)
                .isEqualTo(CheckpointOptions.forCheckpointWithDefaultLocation());
    }

    // Acknowledge from the second task.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
            "Unknown location");
    assertThat(checkpoint.getNumberOfAcknowledgedTasks()).isOne();
    assertThat(checkpoint.getNumberOfNonAcknowledgedTasks()).isOne();
    assertThat(checkpoint.isDisposed()).isFalse();
    assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();

    // A duplicate acknowledge from the same task must not change the checkpoint state.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId),
            "Unknown location");
    assertThat(checkpoint.isDisposed()).isFalse();
    assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();

    // A decline from the first task discards the checkpoint and cancels the timer.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(), attemptID1, checkpointId, checkpointException),
            TASK_MANAGER_LOCATION_INFO);
    assertThat(checkpoint.isDisposed()).isTrue();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size()).isZero();
    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    // Late/duplicate declines for the already-discarded checkpoint must be tolerated.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(), attemptID1, checkpointId, checkpointException),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(), attemptID2, checkpointId, checkpointException),
            TASK_MANAGER_LOCATION_INFO);
    assertThat(checkpoint.isDisposed()).isTrue();
    // Exactly one decline is counted by the failure manager (tolerance 0 -> one callback).
    assertThat(failJobCallback.getInvokeCounter()).isOne();

    checkpointCoordinator.shutdown();
}

/**
 * This test triggers two checkpoints and then sends a decline message from one of the tasks for
 * the first checkpoint. This should discard the first checkpoint while not triggering a new
 * checkpoint because a later checkpoint is already in progress.
 */
@Test
void testTriggerAndDeclineCheckpointComplex() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).isEmpty();

    // Trigger two checkpoints back to back.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

    final CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(2);
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).hasSize(2);

    Iterator<Map.Entry<Long, PendingCheckpoint>> it =
checkpointCoordinator.getPendingCheckpoints().entrySet().iterator();
    long checkpoint1Id = it.next().getKey();
    long checkpoint2Id = it.next().getKey();
    PendingCheckpoint checkpoint1 =
            checkpointCoordinator.getPendingCheckpoints().get(checkpoint1Id);
    PendingCheckpoint checkpoint2 =
            checkpointCoordinator.getPendingCheckpoints().get(checkpoint2Id);

    // Both pending checkpoints are fresh: nothing acknowledged, nothing disposed.
    assertThat(checkpoint1).isNotNull();
    assertThat(checkpoint1.getCheckpointID()).isEqualTo(checkpoint1Id);
    assertThat(checkpoint1.getJobId()).isEqualTo(graph.getJobID());
    assertThat(checkpoint1.getNumberOfNonAcknowledgedTasks()).isEqualTo(2);
    assertThat(checkpoint1.getNumberOfAcknowledgedTasks()).isZero();
    assertThat(checkpoint1.getOperatorStates()).isEmpty();
    assertThat(checkpoint1.isDisposed()).isFalse();
    assertThat(checkpoint1.areTasksFullyAcknowledged()).isFalse();

    assertThat(checkpoint2).isNotNull();
    assertThat(checkpoint2.getCheckpointID()).isEqualTo(checkpoint2Id);
    assertThat(checkpoint2.getJobId()).isEqualTo(graph.getJobID());
    assertThat(checkpoint2.getNumberOfNonAcknowledgedTasks()).isEqualTo(2);
    assertThat(checkpoint2.getNumberOfAcknowledgedTasks()).isZero();
    assertThat(checkpoint2.getOperatorStates()).isEmpty();
    assertThat(checkpoint2.isDisposed()).isFalse();
    assertThat(checkpoint2.areTasksFullyAcknowledged()).isFalse();

    // Each task was triggered twice, once per checkpoint, in order.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        List<CheckpointCoordinatorTestingUtils.TriggeredCheckpoint> triggeredCheckpoints =
                gateway.getTriggeredCheckpoints(
                        vertex.getCurrentExecutionAttempt().getAttemptId());

        assertThat(triggeredCheckpoints).hasSize(2);
        assertThat(triggeredCheckpoints.get(0).checkpointId).isEqualTo(checkpoint1Id);
        assertThat(triggeredCheckpoints.get(1).checkpointId).isEqualTo(checkpoint2Id);
    }

    // Decline checkpoint 1 from one task.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(),
                    attemptID1,
                    checkpoint1Id,
                    new CheckpointException(CHECKPOINT_DECLINED)),
            TASK_MANAGER_LOCATION_INFO);

    // The decline is broadcast as an abort notification to all tasks.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        assertThat(
                        gateway.getOnlyNotifiedAbortedCheckpoint(
                                        vertex.getCurrentExecutionAttempt().getAttemptId())
                                .checkpointId)
                .isEqualTo(checkpoint1Id);
    }

    // Checkpoint 1 is discarded; checkpoint 2 remains the only pending one (no re-trigger).
    assertThat(checkpoint1.isDisposed()).isTrue();

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).hasSize(1);

    long checkpointIdNew =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    PendingCheckpoint checkpointNew =
            checkpointCoordinator.getPendingCheckpoints().get(checkpointIdNew);
    assertThat(checkpointIdNew).isEqualTo(checkpoint2Id);

    assertThat(checkpointNew).isNotNull();
    assertThat(checkpointNew.getCheckpointID()).isEqualTo(checkpointIdNew);
    assertThat(checkpointNew.getJobId()).isEqualTo(graph.getJobID());
    assertThat(checkpointNew.getNumberOfNonAcknowledgedTasks()).isEqualTo(2);
    assertThat(checkpointNew.getNumberOfAcknowledgedTasks()).isZero();
    assertThat(checkpointNew.getOperatorStates()).isEmpty();
    assertThat(checkpointNew.isDisposed()).isFalse();
    assertThat(checkpointNew.areTasksFullyAcknowledged()).isFalse();
    assertThat(checkpointNew.getCheckpointID()).isNotEqualTo(checkpoint1.getCheckpointID());

    // Duplicate/late declines for the already-discarded checkpoint 1 are tolerated and do not
    // cause further abort notifications.
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(),
                    attemptID1,
                    checkpoint1Id,
                    new CheckpointException(CHECKPOINT_DECLINED)),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveDeclineMessage(
            new DeclineCheckpoint(
                    graph.getJobID(),
                    attemptID2,
                    checkpoint1Id,
                    new CheckpointException(CHECKPOINT_DECLINED)),
            TASK_MANAGER_LOCATION_INFO);
    assertThat(checkpoint1.isDisposed()).isTrue();
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        assertThat(
                        gateway.getNotifiedAbortedCheckpoints(
                                vertex.getCurrentExecutionAttempt().getAttemptId()))
                .hasSize(1);
    }

    checkpointCoordinator.shutdown();
}

/**
 * Verifies the happy path of a checkpoint: trigger, acknowledge from all tasks (including a
 * tolerated duplicate ack), completion notifications, state registration, and that a second
 * checkpoint completes cleanly afterwards.
 */
@Test
void testTriggerAndConfirmSimpleCheckpoint() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

    CheckpointCoordinator checkpointCoordinator = getCheckpointCoordinator(graph);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).isEmpty();

    // Trigger the checkpoint; it should become pending with an active cancellation timer.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).hasSize(1);

    long checkpointId =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    PendingCheckpoint checkpoint =
            checkpointCoordinator.getPendingCheckpoints().get(checkpointId);

    assertThat(checkpoint).isNotNull();
    assertThat(checkpoint.getCheckpointID()).isEqualTo(checkpointId);
    assertThat(checkpoint.getJobId()).isEqualTo(graph.getJobID());
assertThat(checkpoint.getNumberOfNonAcknowledgedTasks()).isEqualTo(2);
    assertThat(checkpoint.getNumberOfAcknowledgedTasks()).isZero();
    assertThat(checkpoint.getOperatorStates()).isEmpty();
    assertThat(checkpoint.isDisposed()).isFalse();
    assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();

    // Both tasks received exactly one trigger for this checkpoint.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointId);
    }

    OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();
    OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();

    // Mocked subtask states let us verify shared-state registration counts below.
    OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class);
    OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class);
    TaskStateSnapshot taskOperatorSubtaskStates1 =
            new TaskStateSnapshot(singletonMap(opID1, subtaskState1));
    TaskStateSnapshot taskOperatorSubtaskStates2 =
            new TaskStateSnapshot(singletonMap(opID2, subtaskState2));

    // Acknowledge from the second task.
    AcknowledgeCheckpoint acknowledgeCheckpoint1 =
            new AcknowledgeCheckpoint(
                    graph.getJobID(),
                    attemptID2,
                    checkpointId,
                    new CheckpointMetrics(),
                    taskOperatorSubtaskStates2);
    checkpointCoordinator.receiveAcknowledgeMessage(
            acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
    assertThat(checkpoint.getNumberOfAcknowledgedTasks()).isOne();
    assertThat(checkpoint.getNumberOfNonAcknowledgedTasks()).isOne();
    assertThat(checkpoint.isDisposed()).isFalse();
    assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();
    verify(subtaskState2, times(1))
            .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));

    // A duplicate acknowledge is tolerated but registers the shared states again.
    checkpointCoordinator.receiveAcknowledgeMessage(
            acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
    assertThat(checkpoint.isDisposed()).isFalse();
    assertThat(checkpoint.areTasksFullyAcknowledged()).isFalse();
    verify(subtaskState2, times(2))
            .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));

    // Acknowledge from the first task: the checkpoint is now complete.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(
                    graph.getJobID(),
                    attemptID1,
                    checkpointId,
                    new CheckpointMetrics(),
                    taskOperatorSubtaskStates1),
            TASK_MANAGER_LOCATION_INFO);

    // The pending checkpoint is disposed, retained as successful, and the timer cancelled.
    assertThat(checkpoint.isDisposed()).isTrue();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne();
    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).isEmpty();

    {
        verify(subtaskState1, times(1))
                .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));
        verify(subtaskState2, times(2))
                .registerSharedStates(any(SharedStateRegistry.class), eq(checkpointId));
    }

    // Completion notifications reach both tasks.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointId);
    }

    CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0);
    assertThat(success.getJobId()).isEqualTo(graph.getJobID());
    assertThat(success.getCheckpointID()).isEqualTo(checkpoint.getCheckpointID());
    assertThat(success.getOperatorStates()).hasSize(2);

    // Trigger and complete a second checkpoint (tasks ack without state this time).
    gateway.resetCount();
    checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    long checkpointIdNew =
            checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey();
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointIdNew),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointIdNew),
            TASK_MANAGER_LOCATION_INFO);

    // NOTE: only one retained checkpoint — presumably the store's retention subsumes the
    // earlier one; confirm against the coordinator builder's default store settings.
    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne();
    assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).isEmpty();

    CompletedCheckpoint successNew = checkpointCoordinator.getSuccessfulCheckpoints().get(0);
    assertThat(successNew.getJobId()).isEqualTo(graph.getJobID());
    assertThat(successNew.getCheckpointID()).isEqualTo(checkpointIdNew);
    assertThat(successNew.getOperatorStates()).hasSize(2);
    assertThat(successNew.getOperatorStates().values().stream().allMatch(this::hasNoSubState))
            .isTrue();

    // The second round also triggered and notified both tasks exactly once.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointIdNew);
        assertThat(gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointIdNew);
    }

    checkpointCoordinator.shutdown();
}

/**
 * Verifies that two concurrent checkpoints can be in flight at once and complete independently
 * in order, each sending its own completion notifications.
 */
@Test
void testMultipleConcurrentCheckpoints() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    JobVertexID jobVertexID3 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    // vertex 3 is not a trigger vertex (only acks and gets notified)
                    .addJobVertex(jobVertexID3, false)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
    ExecutionVertex vertex3 = graph.getJobVertex(jobVertexID3).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID3 = vertex3.getCurrentExecutionAttempt().getAttemptId();

    CheckpointCoordinator
checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointCoordinatorConfiguration(
                            CheckpointCoordinatorConfiguration.builder()
                                    .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                    .build())
                    .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(graph);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    // Trigger the first checkpoint; it should succeed.
    final CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    PendingCheckpoint pending1 =
            checkpointCoordinator.getPendingCheckpoints().values().iterator().next();
    long checkpointId1 = pending1.getCheckpointID();

    // Trigger messages for checkpoint 1 went to the two trigger vertices.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointId1);
    }

    // Acknowledge checkpoint 1 from one of the tasks.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId1),
            TASK_MANAGER_LOCATION_INFO);

    // Start the second checkpoint while the first is still pending.
    gateway.resetCount();
    final CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
            checkpointCoordinator.triggerCheckpoint(false);
    manuallyTriggeredScheduledExecutor.triggerAll();
    FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(2);
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero();

    // Identify the newly created pending checkpoint (iteration order is not relied upon).
    PendingCheckpoint pending2;
    {
        Iterator<PendingCheckpoint> all =
                checkpointCoordinator.getPendingCheckpoints().values().iterator();
        PendingCheckpoint cc1 = all.next();
        PendingCheckpoint cc2 = all.next();
        pending2 = pending1 == cc1 ? cc2 : cc1;
    }
    long checkpointId2 = pending2.getCheckpointID();

    // Trigger messages for checkpoint 2 also went to both trigger vertices.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointId2);
    }

    // Interleave acknowledgements of both checkpoints; checkpoint 1 completes first.
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID3, checkpointId1),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId2),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId1),
            TASK_MANAGER_LOCATION_INFO);
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId2),
            TASK_MANAGER_LOCATION_INFO);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne();
    assertThat(pending1.isDisposed()).isTrue();

    // All three tasks were notified about checkpoint 1's completion.
    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointId1);
    }

    // The last acknowledgement completes checkpoint 2 as well.
    gateway.resetCount();
    checkpointCoordinator.receiveAcknowledgeMessage(
            new AcknowledgeCheckpoint(graph.getJobID(), attemptID3, checkpointId2),
            TASK_MANAGER_LOCATION_INFO);

    assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero();
    assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isEqualTo(2);
    assertThat(pending2.isDisposed()).isTrue();

    for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) {
        ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId();
        assertThat(gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId)
                .isEqualTo(checkpointId2);
    }

    // Both checkpoints are retained, in completion order, with empty per-operator state.
    List<CompletedCheckpoint> scs = checkpointCoordinator.getSuccessfulCheckpoints();
    CompletedCheckpoint sc1 = scs.get(0);
    assertThat(sc1.getCheckpointID()).isEqualTo(checkpointId1);
    assertThat(sc1.getJobId()).isEqualTo(graph.getJobID());
    assertThat(sc1.getOperatorStates()).hasSize(3);
    assertThat(sc1.getOperatorStates().values()).allMatch(this::hasNoSubState);

    CompletedCheckpoint sc2 = scs.get(1);
    assertThat(sc2.getCheckpointID()).isEqualTo(checkpointId2);
    assertThat(sc2.getJobId()).isEqualTo(graph.getJobID());
    assertThat(sc2.getOperatorStates()).hasSize(3);
    assertThat(sc2.getOperatorStates().values()).allMatch(this::hasNoSubState);

    checkpointCoordinator.shutdown();
}

/**
 * Verifies checkpoint subsumption: a later successful checkpoint subsumes an earlier,
 * still-incomplete one.
 */
@Test
void testSuccessfulCheckpointSubsumesUnsuccessful() throws Exception {
    JobVertexID jobVertexID1 = new JobVertexID();
    JobVertexID jobVertexID2 = new JobVertexID();
    JobVertexID jobVertexID3 = new JobVertexID();

    CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
            new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

    ExecutionGraph graph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(jobVertexID1)
                    .addJobVertex(jobVertexID2)
                    // vertex 3 is not a trigger vertex
                    .addJobVertex(jobVertexID3, false)
                    .setTaskManagerGateway(gateway)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
    ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
    ExecutionVertex vertex3 = graph.getJobVertex(jobVertexID3).getTaskVertices()[0];

    ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();
    ExecutionAttemptID attemptID3 = vertex3.getCurrentExecutionAttempt().getAttemptId();

    final StandaloneCompletedCheckpointStore completedCheckpointStore =
            new
StandaloneCompletedCheckpointStore(10); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setCompletedCheckpointStore(completedCheckpointStore) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); final CompletableFuture<CompletedCheckpoint> checkpointFuture1 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture1); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); PendingCheckpoint pending1 = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); long checkpointId1 = pending1.getCheckpointID(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId) .isEqualTo(checkpointId1); } OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID3 = vertex3.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates11 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates12 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates13 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState11 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState12 = mock(OperatorSubtaskState.class); 
OperatorSubtaskState subtaskState13 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates11.putSubtaskStateByOperatorID(opID1, subtaskState11); taskOperatorSubtaskStates12.putSubtaskStateByOperatorID(opID2, subtaskState12); taskOperatorSubtaskStates13.putSubtaskStateByOperatorID(opID3, subtaskState13); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates12), TASK_MANAGER_LOCATION_INFO); gateway.resetCount(); final CompletableFuture<CompletedCheckpoint> checkpointFuture2 = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture2); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(2); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); PendingCheckpoint pending2; { Iterator<PendingCheckpoint> all = checkpointCoordinator.getPendingCheckpoints().values().iterator(); PendingCheckpoint cc1 = all.next(); PendingCheckpoint cc2 = all.next(); pending2 = pending1 == cc1 ? 
cc2 : cc1; } long checkpointId2 = pending2.getCheckpointID(); TaskStateSnapshot taskOperatorSubtaskStates21 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates22 = spy(new TaskStateSnapshot()); TaskStateSnapshot taskOperatorSubtaskStates23 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState21 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState22 = mock(OperatorSubtaskState.class); OperatorSubtaskState subtaskState23 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates21.putSubtaskStateByOperatorID(opID1, subtaskState21); taskOperatorSubtaskStates22.putSubtaskStateByOperatorID(opID2, subtaskState22); taskOperatorSubtaskStates23.putSubtaskStateByOperatorID(opID3, subtaskState23); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId) .isEqualTo(checkpointId2); } checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID3, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates23), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates21), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates11), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId2, new CheckpointMetrics(), taskOperatorSubtaskStates22), TASK_MANAGER_LOCATION_INFO); assertThat(pending1.isDisposed()).isTrue(); assertThat(pending2.isDisposed()).isTrue(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); 
assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne(); verify(subtaskState11, times(1)).discardState(); verify(subtaskState12, times(1)).discardState(); verify(subtaskState21, never()).discardState(); verify(subtaskState22, never()).discardState(); verify(subtaskState23, never()).discardState(); List<CompletedCheckpoint> scs = checkpointCoordinator.getSuccessfulCheckpoints(); CompletedCheckpoint success = scs.get(0); assertThat(success.getCheckpointID()).isEqualTo(checkpointId2); assertThat(success.getJobId()).isEqualTo(graph.getJobID()); assertThat(success.getOperatorStates()).hasSize(3); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2, vertex3)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertThat(gateway.getOnlyNotifiedCompletedCheckpoint(attemptId).checkpointId) .isEqualTo(checkpointId2); } checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID3, checkpointId1, new CheckpointMetrics(), taskOperatorSubtaskStates13), TASK_MANAGER_LOCATION_INFO); verify(subtaskState13, times(1)).discardState(); checkpointCoordinator.shutdown(); completedCheckpointStore.shutdown(JobStatus.FINISHED, new CheckpointsCleaner()); verify(subtaskState21, times(1)).discardState(); verify(subtaskState22, times(1)).discardState(); verify(subtaskState23, times(1)).discardState(); } @Test void testCheckpointTimeoutIsolated() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = 
graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne(); PendingCheckpoint checkpoint = checkpointCoordinator.getPendingCheckpoints().values().iterator().next(); assertThat(checkpoint.isDisposed()).isFalse(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = spy(new TaskStateSnapshot()); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpoint.getCheckpointID(), new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); manuallyTriggeredScheduledExecutor.triggerScheduledTasks(); assertThat(checkpoint.isDisposed()) .as("Checkpoint was not canceled by the timeout") .isTrue(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); verify(subtaskState1, times(1)).discardState(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); 
assertThat(gateway.getNotifiedCompletedCheckpoints(attemptId)).isEmpty(); } checkpointCoordinator.shutdown(); } @Test void testHandleMessagesForNonExistingCheckpoints() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2, false) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); long checkpointId = checkpointCoordinator.getPendingCheckpoints().keySet().iterator().next(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(new JobID(), attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 1L), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), createExecutionAttemptId(), checkpointId), TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.shutdown(); } /** * Tests that late acknowledge checkpoint messages are properly cleaned up. 
Furthermore it tests that unknown
     * checkpoint messages for the same job are cleaned up as well. In contrast,
     * checkpointing messages from other jobs should not be touched. A late acknowledge
     * message is an acknowledge message which arrives after the checkpoint has been declined.
     *
     * @throws Exception
     */
    @Test
    void testStateCleanupForLateOrUnknownMessages() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2, false)
                        .setTaskManagerGateway(gateway)
                        .build(EXECUTOR_RESOURCE.getExecutor());

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId();

        CheckpointCoordinatorConfiguration chkConfig =
                new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                        .setMaxConcurrentCheckpoints(1)
                        .build();
        CheckpointCoordinator checkpointCoordinator =
                new CheckpointCoordinatorBuilder()
                        .setCheckpointCoordinatorConfiguration(chkConfig)
                        .setTimer(manuallyTriggeredScheduledExecutor)
                        .build(graph);

        final CompletableFuture<CompletedCheckpoint> checkpointFuture =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture);

        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();

        PendingCheckpoint pendingCheckpoint =
                checkpointCoordinator.getPendingCheckpoints().values().iterator().next();
        long checkpointId = pendingCheckpoint.getCheckpointID();

        OperatorID opIDtrigger =
                vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID();
        TaskStateSnapshot taskOperatorSubtaskStatesTrigger = spy(new TaskStateSnapshot());
        OperatorSubtaskState subtaskStateTrigger = mock(OperatorSubtaskState.class);
        taskOperatorSubtaskStatesTrigger.putSubtaskStateByOperatorID(
                opIDtrigger, subtaskStateTrigger);

        // Acknowledge the pending checkpoint — state is kept, not discarded.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpointId,
                        new CheckpointMetrics(),
                        taskOperatorSubtaskStatesTrigger),
                TASK_MANAGER_LOCATION_INFO);
        verify(subtaskStateTrigger, never()).discardState();

        // Ack from an unknown execution attempt of this job — state must be discarded.
        TaskStateSnapshot unknownSubtaskState = mock(TaskStateSnapshot.class);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        createExecutionAttemptId(),
                        checkpointId,
                        new CheckpointMetrics(),
                        unknownSubtaskState),
                TASK_MANAGER_LOCATION_INFO);
        verify(unknownSubtaskState, times(1)).discardState();

        // Ack belonging to a different job — must be left untouched.
        TaskStateSnapshot differentJobSubtaskState = mock(TaskStateSnapshot.class);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        new JobID(),
                        createExecutionAttemptId(),
                        checkpointId,
                        new CheckpointMetrics(),
                        differentJobSubtaskState),
                TASK_MANAGER_LOCATION_INFO);
        verify(differentJobSubtaskState, never()).discardState();

        // Duplicate ack from an already-acknowledged task — not discarded here.
        TaskStateSnapshot triggerSubtaskState = mock(TaskStateSnapshot.class);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpointId,
                        new CheckpointMetrics(),
                        triggerSubtaskState),
                TASK_MANAGER_LOCATION_INFO);
        verify(triggerSubtaskState, never()).discardState();

        reset(subtaskStateTrigger);

        // Decline the checkpoint — the pending checkpoint is disposed and its
        // collected state discarded.
        checkpointCoordinator.receiveDeclineMessage(
                new DeclineCheckpoint(
                        graph.getJobID(),
                        attemptID1,
                        checkpointId,
                        new CheckpointException(CHECKPOINT_DECLINED)),
                TASK_MANAGER_LOCATION_INFO);
        assertThat(pendingCheckpoint.isDisposed()).isTrue();
        verify(subtaskStateTrigger, times(1)).discardState();

        // Late ack for the declined checkpoint — its state must be discarded.
        TaskStateSnapshot ackSubtaskState = mock(TaskStateSnapshot.class);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        attemptID2,
                        checkpointId,
                        new CheckpointMetrics(),
                        ackSubtaskState),
                TASK_MANAGER_LOCATION_INFO);
        verify(ackSubtaskState, times(1)).discardState();

        reset(differentJobSubtaskState);

        // Late ack from a different job — still untouched.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        new JobID(),
                        createExecutionAttemptId(),
                        checkpointId,
                        new CheckpointMetrics(),
                        differentJobSubtaskState),
                TASK_MANAGER_LOCATION_INFO);
        verify(differentJobSubtaskState, never()).discardState();

        // Late ack from an unknown attempt of this job — discarded.
        TaskStateSnapshot unknownSubtaskState2 = mock(TaskStateSnapshot.class);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(
                        graph.getJobID(),
                        createExecutionAttemptId(),
                        checkpointId,
                        new CheckpointMetrics(),
                        unknownSubtaskState2),
                TASK_MANAGER_LOCATION_INFO);
        verify(unknownSubtaskState2, times(1)).discardState();
    }

    @Test
    void testMaxConcurrentAttempts1() {
        testMaxConcurrentAttempts(1);
    }

    @Test
    void testMaxConcurrentAttempts2() {
        testMaxConcurrentAttempts(2);
    }

    @Test
    void testMaxConcurrentAttempts5() {
        testMaxConcurrentAttempts(5);
    }

    @Test
    void testTriggerAndConfirmSimpleSavepoint() throws Exception {
        JobVertexID jobVertexID1 = new JobVertexID();
        JobVertexID jobVertexID2 = new JobVertexID();

        CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

        ExecutionGraph graph =
                new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                        .addJobVertex(jobVertexID1)
                        .addJobVertex(jobVertexID2)
                        .setTaskManagerGateway(gateway)
                        .build(EXECUTOR_RESOURCE.getExecutor());

        ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
        ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0];
        ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId();
        ExecutionAttemptID attemptID2 =
vertex2.getCurrentExecutionAttempt().getAttemptId(); CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setAlignedCheckpointTimeout(Long.MAX_VALUE) .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(statsTracker) .build(graph); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); String savepointDir = TempDirUtils.newFolder(tmpFolder).getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepointFuture = checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(savepointFuture).isNotDone(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne(); long checkpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); PendingCheckpoint pending = checkpointCoordinator.getPendingCheckpoints().get(checkpointId); assertThat(pending).isNotNull(); assertThat(pending.getCheckpointID()).isEqualTo(checkpointId); assertThat(pending.getJobId()).isEqualTo(graph.getJobID()); assertThat(pending.getNumberOfNonAcknowledgedTasks()).isEqualTo(2); assertThat(pending.getNumberOfAcknowledgedTasks()).isZero(); assertThat(pending.getOperatorStates()).isEmpty(); assertThat(pending.isDisposed()).isFalse(); assertThat(pending.areTasksFullyAcknowledged()).isFalse(); assertThat(pending.canBeSubsumed()).isFalse(); OperatorID opID1 = OperatorID.fromJobVertexID(vertex1.getJobvertexId()); OperatorID opID2 = OperatorID.fromJobVertexID(vertex2.getJobvertexId()); OperatorSubtaskState subtaskState1 = mock(OperatorSubtaskState.class); 
OperatorSubtaskState subtaskState2 = mock(OperatorSubtaskState.class); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(singletonMap(opID1, subtaskState1)); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(singletonMap(opID2, subtaskState2)); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertThat(pending.getNumberOfAcknowledgedTasks()).isOne(); assertThat(pending.getNumberOfNonAcknowledgedTasks()).isOne(); assertThat(pending.isDisposed()).isFalse(); assertThat(pending.areTasksFullyAcknowledged()).isFalse(); assertThat(savepointFuture.isDone()).isFalse(); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); assertThat(pending.isDisposed()).isFalse(); assertThat(pending.areTasksFullyAcknowledged()).isFalse(); assertThat(savepointFuture).isNotDone(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1), TASK_MANAGER_LOCATION_INFO); assertThat(pending.isDisposed()).isTrue(); assertThat(savepointFuture.get()).isNotNull(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId) .isEqualTo(checkpointId); assertThat(gateway.getNotifiedCompletedCheckpoints(attemptId)).isEmpty(); } CompletedCheckpoint success = savepointFuture.get(); assertThat(success.getJobId()).isEqualTo(graph.getJobID()); 
assertThat(success.getCheckpointID()).isEqualTo(pending.getCheckpointID()); assertThat(success.getOperatorStates()).hasSize(2); AbstractCheckpointStats actualStats = statsTracker.createSnapshot().getHistory().getCheckpointById(checkpointId); assertThat(actualStats.getCheckpointId()).isEqualTo(checkpointId); assertThat(actualStats.getStatus()).isEqualTo(CheckpointStatsStatus.COMPLETED); checkpointCoordinator.shutdown(); } /** * Triggers a savepoint and two checkpoints. The second checkpoint completes and subsumes the * first checkpoint, but not the first savepoint. Then we trigger another checkpoint and * savepoint. The 2nd savepoint completes and subsumes the last checkpoint, but not the first * savepoint. */ @Test void testSavepointsAreNotSubsumed() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); StandaloneCheckpointIDCounter counter = new StandaloneCheckpointIDCounter(); CheckpointCoordinator checkpointCoordinator = spy( new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setCheckpointIDCounter(counter) .setCompletedCheckpointStore( new StandaloneCompletedCheckpointStore(1)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph)); String savepointDir = TempDirUtils.newFolder(tmpFolder).getAbsolutePath(); CompletableFuture<CompletedCheckpoint> 
savepointFuture1 =
                checkpointCoordinator.triggerSavepoint(
                        savepointDir, SavepointFormatType.CANONICAL);
        manuallyTriggeredScheduledExecutor.triggerAll();
        long savepointId1 = counter.getLast();
        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();

        CompletableFuture<CompletedCheckpoint> checkpointFuture1 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(2);
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture1);

        CompletableFuture<CompletedCheckpoint> checkpointFuture2 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture2);
        long checkpointId2 = counter.getLast();
        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(3);

        // Completing checkpoint 2 subsumes checkpoint 1 but not the savepoint.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId2),
                TASK_MANAGER_LOCATION_INFO);

        verify(checkpointCoordinator, times(1))
                .sendAcknowledgeMessages(
                        anyList(), eq(checkpointId2), anyLong(), eq(INVALID_CHECKPOINT_ID));

        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
        assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne();
        assertThat(checkpointCoordinator.getPendingCheckpoints().get(savepointId1).isDisposed())
                .isFalse();
        assertThat(savepointFuture1).isNotDone();

        CompletableFuture<CompletedCheckpoint> checkpointFuture3 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture3);
        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(2);

        CompletableFuture<CompletedCheckpoint> savepointFuture2 =
                checkpointCoordinator.triggerSavepoint(
                        savepointDir, SavepointFormatType.CANONICAL);
        manuallyTriggeredScheduledExecutor.triggerAll();
        long savepointId2 = counter.getLast();
        FutureUtils.throwIfCompletedExceptionally(savepointFuture2);
        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(3);

        // Completing savepoint 2 subsumes checkpoint 3 but leaves savepoint 1 pending,
        // and savepoints never trigger acknowledge notifications.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, savepointId2),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, savepointId2),
                TASK_MANAGER_LOCATION_INFO);

        verify(checkpointCoordinator, times(0))
                .sendAcknowledgeMessages(anyList(), eq(savepointId2), anyLong(), anyLong());

        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isEqualTo(2);
        assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne();
        assertThat(checkpointCoordinator.getPendingCheckpoints().get(savepointId1).isDisposed())
                .isFalse();
        assertThat(savepointFuture1).isNotDone();
        assertThat(savepointFuture2).isCompletedWithValueMatching(Objects::nonNull);

        // Complete the first savepoint.
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, savepointId1),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, savepointId1),
                TASK_MANAGER_LOCATION_INFO);

        verify(checkpointCoordinator, times(0))
                .sendAcknowledgeMessages(anyList(), eq(savepointId1), anyLong(), anyLong());

        assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isOne();
        assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne();
        assertThat(savepointFuture1).isCompletedWithValueMatching(Objects::nonNull);

        CompletableFuture<CompletedCheckpoint> checkpointFuture4 =
                checkpointCoordinator.triggerCheckpoint(false);
        manuallyTriggeredScheduledExecutor.triggerAll();
        FutureUtils.throwIfCompletedExceptionally(checkpointFuture4);
        long checkpointId4 = counter.getLast();
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId4),
                TASK_MANAGER_LOCATION_INFO);
        checkpointCoordinator.receiveAcknowledgeMessage(
                new AcknowledgeCheckpoint(graph.getJobID(), attemptID2, checkpointId4),
                TASK_MANAGER_LOCATION_INFO);

        verify(checkpointCoordinator, times(1))
                .sendAcknowledgeMessages(
                        anyList(), eq(checkpointId4), anyLong(), eq(checkpointId2));
    }

    private void testMaxConcurrentAttempts(int maxConcurrentAttempts) {
        // NOTE(review): catch-and-fail with printStackTrace is legacy style; kept
        // as-is so existing failure reporting is unchanged.
        try {
            JobVertexID jobVertexID1 = new JobVertexID();

            CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway =
                    new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway();

            ExecutionGraph graph =
                    new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                            .addJobVertex(jobVertexID1)
                            .setTaskManagerGateway(gateway)
                            .build(EXECUTOR_RESOURCE.getExecutor());

            ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0];
            ExecutionAttemptID attemptID1 =
                    vertex1.getCurrentExecutionAttempt().getAttemptId();

            CheckpointCoordinatorConfiguration chkConfig =
                    new CheckpointCoordinatorConfiguration
                            .CheckpointCoordinatorConfigurationBuilder()
                            .setCheckpointInterval(10)
                            .setCheckpointTimeout(200000)
                            .setMinPauseBetweenCheckpoints(0L)
                            .setMaxConcurrentCheckpoints(maxConcurrentAttempts)
                            .build();
            CheckpointCoordinator checkpointCoordinator =
                    new CheckpointCoordinatorBuilder()
                            .setCheckpointCoordinatorConfiguration(chkConfig)
                            .setCompletedCheckpointStore(
                                    new StandaloneCompletedCheckpointStore(2))
                            .setTimer(manuallyTriggeredScheduledExecutor)
                            .build(graph);

            checkpointCoordinator.startCheckpointScheduler();

            // Fill up the allowed number of concurrent checkpoints.
            for (int i = 0; i < maxConcurrentAttempts; i++) {
                manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks();
                manuallyTriggeredScheduledExecutor.triggerAll();
            }

            assertThat(gateway.getTriggeredCheckpoints(attemptID1).size())
                    .isEqualTo(maxConcurrentAttempts);
assertThat(gateway.getNotifiedCompletedCheckpoints(attemptID1).size()).isZero(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 1L), TASK_MANAGER_LOCATION_INFO); final Collection<ScheduledFuture<?>> periodicScheduledTasks = manuallyTriggeredScheduledExecutor.getActivePeriodicScheduledTask(); assertThat(periodicScheduledTasks.size()).isOne(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(gateway.getTriggeredCheckpoints(attemptID1)) .hasSize(maxConcurrentAttempts + 1); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(gateway.getTriggeredCheckpoints(attemptID1)) .hasSize(maxConcurrentAttempts + 1); checkpointCoordinator.shutdown(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test void testMaxConcurrentAttemptsWithSubsumption() throws Exception { final int maxConcurrentAttempts = 2; JobVertexID jobVertexID1 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0L) .setMaxConcurrentCheckpoints(maxConcurrentAttempts) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); 
checkpointCoordinator.startCheckpointScheduler(); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (checkpointCoordinator.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()) .isEqualTo(maxConcurrentAttempts); assertThat(checkpointCoordinator.getPendingCheckpoints()).containsKey(1L); assertThat(checkpointCoordinator.getPendingCheckpoints()).containsKey(2L); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, 2L), TASK_MANAGER_LOCATION_INFO); do { manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); } while (checkpointCoordinator.getNumberOfPendingCheckpoints() < maxConcurrentAttempts); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()) .isEqualTo(maxConcurrentAttempts); assertThat(checkpointCoordinator.getPendingCheckpoints()).containsKey(3L); assertThat(checkpointCoordinator.getPendingCheckpoints()).containsKey(4L); checkpointCoordinator.shutdown(); } @Test void testPeriodicSchedulingWithInactiveTasks() throws Exception { CheckpointCoordinator checkpointCoordinator = setupCheckpointCoordinatorWithInactiveTasks(new MemoryStateBackend()); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isGreaterThan(0); } private CheckpointCoordinator setupCheckpointCoordinatorWithInactiveTasks( CheckpointStorage checkpointStorage) throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; 
CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setCheckpointInterval(10) .setCheckpointTimeout(200000) .setMinPauseBetweenCheckpoints(0) .setMaxConcurrentCheckpoints(2) .build(); CheckpointIDCounterWithOwner checkpointIDCounter = new CheckpointIDCounterWithOwner(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setCheckpointStorage(checkpointStorage) .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointIDCounter(checkpointIDCounter) .build(graph); checkpointIDCounter.setOwner(checkpointCoordinator); checkpointCoordinator.startCheckpointScheduler(); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); vertex1.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING); manuallyTriggeredScheduledExecutor.triggerPeriodicScheduledTasks(); manuallyTriggeredScheduledExecutor.triggerAll(); return checkpointCoordinator; } /** Tests that the savepoints can be triggered concurrently. 
*/ @Test void testConcurrentSavepoints() throws Exception { int numSavepoints = 5; JobVertexID jobVertexID1 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); StandaloneCheckpointIDCounter checkpointIDCounter = new StandaloneCheckpointIDCounter(); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setMaxConcurrentCheckpoints( 1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCheckpointIDCounter(checkpointIDCounter) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); List<CompletableFuture<CompletedCheckpoint>> savepointFutures = new ArrayList<>(); String savepointDir = TempDirUtils.newFolder(tmpFolder).getAbsolutePath(); for (int i = 0; i < numSavepoints; i++) { savepointFutures.add( checkpointCoordinator.triggerSavepoint( savepointDir, SavepointFormatType.CANONICAL)); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) { assertThat(savepointFuture).isNotDone(); } manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId = checkpointIDCounter.getLast(); for (int i = 0; i < numSavepoints; i++, checkpointId--) { checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint(graph.getJobID(), attemptID1, checkpointId), TASK_MANAGER_LOCATION_INFO); } for (CompletableFuture<CompletedCheckpoint> savepointFuture : savepointFutures) { assertThat(savepointFuture).isCompletedWithValueMatching(Objects::nonNull); } } /** Tests that no minimum delay between savepoints is 
enforced. */ @Test void testMinDelayBetweenSavepoints() throws Exception { CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setMinPauseBetweenCheckpoints( 100000000L) .setMaxConcurrentCheckpoints(1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2)) .setTimer(manuallyTriggeredScheduledExecutor) .build(EXECUTOR_RESOURCE.getExecutor()); String savepointDir = TempDirUtils.newFolder(tmpFolder).getAbsolutePath(); CompletableFuture<CompletedCheckpoint> savepoint0 = checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL); assertThat(savepoint0).as("Did not trigger savepoint").isNotDone(); CompletableFuture<CompletedCheckpoint> savepoint1 = checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL); assertThat(savepoint1).as("Did not trigger savepoint").isNotDone(); } /** Tests that the externalized checkpoint configuration is respected. 
*/ @Test void testExternalizedCheckpoints() throws Exception { ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder() .setCheckpointRetentionPolicy(CheckpointRetentionPolicy.RETAIN_ON_FAILURE) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration(chkConfig) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); for (PendingCheckpoint checkpoint : checkpointCoordinator.getPendingCheckpoints().values()) { CheckpointProperties props = checkpoint.getProps(); CheckpointProperties expected = CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.RETAIN_ON_FAILURE); assertThat(props).isEqualTo(expected); } checkpointCoordinator.shutdown(); } @Test void testCreateKeyGroupPartitions() { testCreateKeyGroupPartitions(1, 1); testCreateKeyGroupPartitions(13, 1); testCreateKeyGroupPartitions(13, 2); testCreateKeyGroupPartitions(Short.MAX_VALUE, 1); testCreateKeyGroupPartitions(Short.MAX_VALUE, 13); testCreateKeyGroupPartitions(Short.MAX_VALUE, Short.MAX_VALUE); Random r = new Random(1234); for (int k = 0; k < 1000; ++k) { int maxParallelism = 1 + r.nextInt(Short.MAX_VALUE - 1); int parallelism = 1 + r.nextInt(maxParallelism); testCreateKeyGroupPartitions(maxParallelism, parallelism); } } private void testCreateKeyGroupPartitions(int maxParallelism, int parallelism) { List<KeyGroupRange> ranges = StateAssignmentOperation.createKeyGroupPartitions(maxParallelism, parallelism); for (int i = 0; i < maxParallelism; ++i) { KeyGroupRange range = 
ranges.get( KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup( maxParallelism, parallelism, i)); if (!range.contains(i)) { fail("Could not find expected key-group " + i + " in range " + range); } } } @Test void testPartitionableStateRepartitioning() { Random r = new Random(42); for (int run = 0; run < 10000; ++run) { int oldParallelism = 1 + r.nextInt(9); int newParallelism = 1 + r.nextInt(9); int numNamedStates = 1 + r.nextInt(9); int maxPartitionsPerState = 1 + r.nextInt(9); doTestPartitionableStateRepartitioning( r, oldParallelism, newParallelism, numNamedStates, maxPartitionsPerState); } } private void doTestPartitionableStateRepartitioning( Random r, int oldParallelism, int newParallelism, int numNamedStates, int maxPartitionsPerState) { List<List<OperatorStateHandle>> previousParallelOpInstanceStates = new ArrayList<>(oldParallelism); for (int i = 0; i < oldParallelism; ++i) { Path fakePath = new Path("/fake-" + i); Map<String, OperatorStateHandle.StateMetaInfo> namedStatesToOffsets = new HashMap<>(); int off = 0; for (int s = 0; s < numNamedStates - 1; ++s) { long[] offs = new long[1 + r.nextInt(maxPartitionsPerState)]; for (int o = 0; o < offs.length; ++o) { offs[o] = off; ++off; } OperatorStateHandle.Mode mode = r.nextInt(10) == 0 ? 
OperatorStateHandle.Mode.UNION : OperatorStateHandle.Mode.SPLIT_DISTRIBUTE; namedStatesToOffsets.put( "State-" + s, new OperatorStateHandle.StateMetaInfo(offs, mode)); } if (numNamedStates % 2 == 0) { long[] offs = {off + 1, off + 2, off + 3, off + 4}; namedStatesToOffsets.put( "State-" + (numNamedStates - 1), new OperatorStateHandle.StateMetaInfo( offs, OperatorStateHandle.Mode.BROADCAST)); } previousParallelOpInstanceStates.add( Collections.singletonList( new OperatorStreamStateHandle( namedStatesToOffsets, new FileStateHandle(fakePath, -1)))); } Map<StreamStateHandle, Map<String, List<Long>>> expected = new HashMap<>(); int taskIndex = 0; int expectedTotalPartitions = 0; for (List<OperatorStateHandle> previousParallelOpInstanceState : previousParallelOpInstanceStates) { assertThat(previousParallelOpInstanceState.size()).isOne(); for (OperatorStateHandle psh : previousParallelOpInstanceState) { Map<String, OperatorStateHandle.StateMetaInfo> offsMap = psh.getStateNameToPartitionOffsets(); Map<String, List<Long>> offsMapWithList = new HashMap<>(offsMap.size()); for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> e : offsMap.entrySet()) { long[] offs = e.getValue().getOffsets(); int replication; switch (e.getValue().getDistributionMode()) { case UNION: replication = newParallelism; break; case BROADCAST: int extra = taskIndex < (newParallelism % oldParallelism) ? 
1 : 0; replication = newParallelism / oldParallelism + extra; break; case SPLIT_DISTRIBUTE: replication = 1; break; default: throw new RuntimeException( "Unknown distribution mode " + e.getValue().getDistributionMode()); } if (replication > 0) { expectedTotalPartitions += replication * offs.length; List<Long> offsList = new ArrayList<>(offs.length); for (long off : offs) { for (int p = 0; p < replication; ++p) { offsList.add(off); } } offsMapWithList.put(e.getKey(), offsList); } } if (!offsMapWithList.isEmpty()) { expected.put(psh.getDelegateStateHandle(), offsMapWithList); } taskIndex++; } } OperatorStateRepartitioner<OperatorStateHandle> repartitioner = RoundRobinOperatorStateRepartitioner.INSTANCE; List<List<OperatorStateHandle>> pshs = repartitioner.repartitionState( previousParallelOpInstanceStates, oldParallelism, newParallelism); Map<StreamStateHandle, Map<String, List<Long>>> actual = new HashMap<>(); int minCount = Integer.MAX_VALUE; int maxCount = 0; int actualTotalPartitions = 0; for (int p = 0; p < newParallelism; ++p) { int partitionCount = 0; Collection<OperatorStateHandle> pshc = pshs.get(p); for (OperatorStateHandle sh : pshc) { for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> namedState : sh.getStateNameToPartitionOffsets().entrySet()) { Map<String, List<Long>> stateToOffsets = actual.computeIfAbsent( sh.getDelegateStateHandle(), k -> new HashMap<>()); List<Long> actualOffs = stateToOffsets.computeIfAbsent( namedState.getKey(), k -> new ArrayList<>()); long[] add = namedState.getValue().getOffsets(); for (long l : add) { actualOffs.add(l); } partitionCount += namedState.getValue().getOffsets().length; } } minCount = Math.min(minCount, partitionCount); maxCount = Math.max(maxCount, partitionCount); actualTotalPartitions += partitionCount; } for (Map<String, List<Long>> v : actual.values()) { for (List<Long> l : v.values()) { Collections.sort(l); } } if (oldParallelism != newParallelism) { int maxLoadDiff = maxCount - minCount; 
assertThat(maxLoadDiff <= 1) .as("Difference in partition load is > 1 : " + maxLoadDiff) .isTrue(); } assertThat(actualTotalPartitions).isEqualTo(expectedTotalPartitions); assertThat(actual).isEqualTo(expected); } /** Tests that the pending checkpoint stats callbacks are created. */ @Test void testCheckpointStatsTrackerPendingCheckpointCallback() throws Exception { CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(tracker) .build(EXECUTOR_RESOURCE.getExecutor()); when(tracker.reportPendingCheckpoint( anyLong(), anyLong(), any(CheckpointProperties.class), any(Map.class))) .thenReturn(mock(PendingCheckpointStats.class)); CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); verify(tracker, times(1)) .reportPendingCheckpoint( eq(1L), any(Long.class), eq( CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION)), any()); } /** Tests that the restore callbacks are called if registered. 
*/ @Test void testCheckpointStatsTrackerRestoreCallback() throws Exception { StandaloneCompletedCheckpointStore store = new StandaloneCompletedCheckpointStore(1); CheckpointStatsTracker tracker = mock(CheckpointStatsTracker.class); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCompletedCheckpointStore(store) .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(tracker) .build(EXECUTOR_RESOURCE.getExecutor()); store.addCheckpointAndSubsumeOldestOne( new CompletedCheckpoint( new JobID(), 0, 0, 0, Collections.<OperatorID, OperatorState>emptyMap(), Collections.<MasterState>emptyList(), CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new TestCompletedCheckpointStorageLocation(), null), new CheckpointsCleaner(), () -> {}); assertThat( checkpointCoordinator.restoreLatestCheckpointedStateToAll( Collections.emptySet(), true)) .isTrue(); verify(tracker, times(1)).reportRestoredCheckpoint(any(RestoredCheckpointStats.class)); } @Test void testSharedStateRegistrationOnRestore() throws Exception { for (RestoreMode restoreMode : RestoreMode.values()) { JobVertexID jobVertexID1 = new JobVertexID(); int parallelism1 = 2; int maxParallelism1 = 4; ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1, parallelism1, maxParallelism1) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionJobVertex jobVertex1 = graph.getJobVertex(jobVertexID1); List<CompletedCheckpoint> checkpoints = Collections.emptyList(); SharedStateRegistry firstInstance = SharedStateRegistry.DEFAULT_FACTORY.create( org.apache.flink.util.concurrent.Executors.directExecutor(), checkpoints, restoreMode); final EmbeddedCompletedCheckpointStore store = new EmbeddedCompletedCheckpointStore(10, checkpoints, firstInstance); final CheckpointCoordinatorBuilder coordinatorBuilder = new 
CheckpointCoordinatorBuilder().setTimer(manuallyTriggeredScheduledExecutor); final CheckpointCoordinator coordinator = coordinatorBuilder.setCompletedCheckpointStore(store).build(graph); final int numCheckpoints = 3; List<KeyGroupRange> keyGroupPartitions1 = StateAssignmentOperation.createKeyGroupPartitions( maxParallelism1, parallelism1); for (int i = 0; i < numCheckpoints; ++i) { performIncrementalCheckpoint( graph.getJobID(), coordinator, jobVertex1, keyGroupPartitions1, i); } List<CompletedCheckpoint> completedCheckpoints = coordinator.getSuccessfulCheckpoints(); assertThat(completedCheckpoints.size()).isEqualTo(numCheckpoints); int sharedHandleCount = 0; List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint = new ArrayList<>(numCheckpoints); for (int i = 0; i < numCheckpoints; ++i) { sharedHandlesByCheckpoint.add(new HashMap<>(2)); } int cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { verify(keyedStateHandle, times(1)) .registerSharedStates( firstInstance, completedCheckpoint.getCheckpointID()); IncrementalRemoteKeyedStateHandle incrementalKeyedStateHandle = (IncrementalRemoteKeyedStateHandle) keyedStateHandle; sharedHandlesByCheckpoint .get(cp) .putAll(incrementalKeyedStateHandle.getSharedState()); for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getSharedState().values()) { assertThat( streamStateHandle instanceof PlaceholderStreamStateHandle) .isFalse(); verify(streamStateHandle, never()).discardState(); ++sharedHandleCount; } for (StreamStateHandle streamStateHandle : incrementalKeyedStateHandle.getPrivateState().values()) { verify(streamStateHandle, never()).discardState(); } verify(incrementalKeyedStateHandle.getMetaStateHandle(), never()) .discardState(); } 
verify(subtaskState, never()).discardState(); } } ++cp; } assertThat(sharedHandleCount).isEqualTo(10); store.removeOldestCheckpoint(); for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint) { for (StreamStateHandle streamStateHandle : cpList.values()) { verify(streamStateHandle, never()).discardState(); } } store.shutdown(JobStatus.SUSPENDED, new CheckpointsCleaner()); Set<ExecutionJobVertex> tasks = new HashSet<>(); tasks.add(jobVertex1); assertThat(store.getShutdownStatus().orElse(null)).isEqualTo(JobStatus.SUSPENDED); SharedStateRegistry secondInstance = SharedStateRegistry.DEFAULT_FACTORY.create( org.apache.flink.util.concurrent.Executors.directExecutor(), store.getAllCheckpoints(), restoreMode); final EmbeddedCompletedCheckpointStore secondStore = new EmbeddedCompletedCheckpointStore( 10, store.getAllCheckpoints(), secondInstance); final CheckpointCoordinator secondCoordinator = coordinatorBuilder.setCompletedCheckpointStore(secondStore).build(graph); assertThat(secondCoordinator.restoreLatestCheckpointedStateToAll(tasks, false)) .isTrue(); cp = 0; for (CompletedCheckpoint completedCheckpoint : completedCheckpoints) { for (OperatorState taskState : completedCheckpoint.getOperatorStates().values()) { for (OperatorSubtaskState subtaskState : taskState.getStates()) { for (KeyedStateHandle keyedStateHandle : subtaskState.getManagedKeyedState()) { VerificationMode verificationMode; if (cp > 0) { verificationMode = times(1); } else { verificationMode = never(); } verify(keyedStateHandle, verificationMode) .registerSharedStates( secondInstance, completedCheckpoint.getCheckpointID()); } } } ++cp; } secondStore.removeOldestCheckpoint(); verifyDiscard( sharedHandlesByCheckpoint, cpId -> restoreMode == RestoreMode.CLAIM && cpId == 0 ? times(1) : never()); secondStore.removeOldestCheckpoint(); verifyDiscard(sharedHandlesByCheckpoint, cpId -> cpId == 1 ? 
never() : atLeast(0)); } } @Test void jobFailsIfInFlightSynchronousSavepointIsDiscarded() throws Exception { final Tuple2<Integer, Throwable> invocationCounterAndException = Tuple2.of(0, null); final Throwable expectedRootCause = new IOException("Custom-Exception"); JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); final CheckpointCoordinator coordinator = getCheckpointCoordinator( graph, new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void failJob(Throwable cause) { invocationCounterAndException.f0 += 1; invocationCounterAndException.f1 = cause; } @Override public void failJobDueToTaskFailure( Throwable cause, ExecutionAttemptID failingTask) { throw new AssertionError( "This method should not be called for the test."); } })); final CompletableFuture<CompletedCheckpoint> savepointFuture = coordinator.triggerSynchronousSavepoint( false, "test-dir", SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); final PendingCheckpoint syncSavepoint = declineSynchronousSavepoint( graph.getJobID(), coordinator, attemptID1, expectedRootCause); assertThat(syncSavepoint.isDisposed()).isTrue(); String expectedRootCauseMessage = String.format( "%s: %s", expectedRootCause.getClass().getName(), expectedRootCause.getMessage()); try { savepointFuture.get(); fail("Expected Exception not found."); } catch (ExecutionException e) { final Throwable cause = 
ExceptionUtils.stripExecutionException(e); assertThat(cause instanceof CheckpointException).isTrue(); assertThat(cause.getCause().getCause().getMessage()) .isEqualTo(expectedRootCauseMessage); } assertThat(invocationCounterAndException.f0.intValue()).isEqualTo(1L); assertThat( invocationCounterAndException.f1 instanceof CheckpointException && invocationCounterAndException .f1 .getCause() .getCause() .getMessage() .equals(expectedRootCauseMessage)) .isTrue(); coordinator.shutdown(); } /** Tests that do not trigger checkpoint when stop the coordinator after the eager pre-check. */ @Test void testTriggerCheckpointAfterStopping() throws Exception { StoppingCheckpointIDCounter testingCounter = new StoppingCheckpointIDCounter(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointIDCounter(testingCounter) .setTimer(manuallyTriggeredScheduledExecutor) .build(EXECUTOR_RESOURCE.getExecutor()); testingCounter.setOwner(checkpointCoordinator); testTriggerCheckpoint(checkpointCoordinator, PERIODIC_SCHEDULER_SHUTDOWN); } /** Tests that do not trigger checkpoint when CheckpointIDCounter IOException occurred. 
*/ @Test void testTriggerCheckpointWithCounterIOException() throws Exception { IOExceptionCheckpointIDCounter testingCounter = new IOExceptionCheckpointIDCounter(); TestFailJobCallback failureCallback = new TestFailJobCallback(); CheckpointStatsTracker statsTracker = new CheckpointStatsTracker(Integer.MAX_VALUE, new UnregisteredMetricsGroup()); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointIDCounter(testingCounter) .setFailureManager(new CheckpointFailureManager(0, failureCallback)) .setTimer(manuallyTriggeredScheduledExecutor) .setCheckpointStatsTracker(statsTracker) .build(EXECUTOR_RESOURCE.getExecutor()); testingCounter.setOwner(checkpointCoordinator); testTriggerCheckpoint(checkpointCoordinator, IO_EXCEPTION); assertThat(failureCallback.getInvokeCounter()).isOne(); CheckpointStatsCounts counts = statsTracker.createSnapshot().getCounts(); assertThat(counts.getNumberOfRestoredCheckpoints()).isZero(); assertThat(counts.getTotalNumberOfCheckpoints()).isOne(); assertThat(counts.getNumberOfInProgressCheckpoints()).isZero(); assertThat(counts.getNumberOfCompletedCheckpoints()).isZero(); assertThat(counts.getNumberOfFailedCheckpoints()).isOne(); assertThat(statsTracker.getPendingCheckpointStats(1)).isNull(); } private void testTriggerCheckpoint( CheckpointCoordinator checkpointCoordinator, CheckpointFailureReason expectedFailureReason) throws Exception { try { checkpointCoordinator.startCheckpointScheduler(); final CompletableFuture<CompletedCheckpoint> onCompletionPromise = checkpointCoordinator.triggerCheckpoint( CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), null, true); manuallyTriggeredScheduledExecutor.triggerAll(); try { onCompletionPromise.get(); fail("should not trigger periodic checkpoint"); } catch (ExecutionException e) { final Optional<CheckpointException> checkpointExceptionOptional = ExceptionUtils.findThrowable(e, CheckpointException.class); if 
(!checkpointExceptionOptional.isPresent() || checkpointExceptionOptional.get().getCheckpointFailureReason() != expectedFailureReason) { throw e; } } } finally { checkpointCoordinator.shutdown(); } } @Test void testSavepointScheduledInUnalignedMode() throws Exception { int maxConcurrentCheckpoints = 1; int checkpointRequestsToSend = 10; int activeRequests = 0; ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinator coordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setUnalignedCheckpointsEnabled(true) .setMaxConcurrentCheckpoints(maxConcurrentCheckpoints) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .build(graph); try { List<Future<?>> checkpointFutures = new ArrayList<>(checkpointRequestsToSend); coordinator.startCheckpointScheduler(); while (activeRequests < checkpointRequestsToSend) { checkpointFutures.add(coordinator.triggerCheckpoint(true)); activeRequests++; } manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(coordinator.getNumQueuedRequests()) .isEqualTo(activeRequests - maxConcurrentCheckpoints); Future<?> savepointFuture = coordinator.triggerSavepoint("/tmp", SavepointFormatType.CANONICAL); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(coordinator.getNumQueuedRequests()) .isEqualTo(++activeRequests - maxConcurrentCheckpoints); coordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), createExecutionAttemptId(), 1L, new CheckpointException(CHECKPOINT_DECLINED)), "none"); manuallyTriggeredScheduledExecutor.triggerAll(); activeRequests--; assertThat(coordinator.getNumQueuedRequests()) .isEqualTo(activeRequests - maxConcurrentCheckpoints); assertThat(checkpointFutures.stream().filter(Future::isDone).count()).isOne(); assertThat(savepointFuture.isDone()).isFalse(); 
assertThat(coordinator.getNumberOfPendingCheckpoints()) .isEqualTo(maxConcurrentCheckpoints); CheckpointProperties props = coordinator.getPendingCheckpoints().values().iterator().next().getProps(); assertThat(props.isSavepoint()).isTrue(); assertThat(props.forceCheckpoint()).isFalse(); } finally { coordinator.shutdown(); } } /** * Test that the checkpoint still behave correctly when the task checkpoint is triggered by the * master hooks and finished before the master checkpoint. Also make sure that the operator * coordinators are checkpointed before starting the task checkpoint. */ @Test void testExternallyInducedSourceWithOperatorCoordinator() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway gateway = new CheckpointCoordinatorTestingUtils.CheckpointRecorderTaskManagerGateway(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) .setTaskManagerGateway(gateway) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(); OperatorSubtaskState subtaskState1 = OperatorSubtaskState.builder().build(); OperatorSubtaskState subtaskState2 = OperatorSubtaskState.builder().build(); 
taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1);
// Bug fix: operator 2's subtask state belongs in vertex 2's TaskStateSnapshot.
// The original stored it in taskOperatorSubtaskStates1, leaving
// taskOperatorSubtaskStates2 empty when it is acknowledged below; compare the
// identical (correct) setup in testCompleteCheckpointFailureWithExternallyInducedSource.
taskOperatorSubtaskStates2.putSubtaskStateByOperatorID(opID2, subtaskState2);

// Flag flipped by the coordinator-checkpoint callback so the master hook can
// verify that operator coordinators are checkpointed before the hooks run.
AtomicBoolean coordCheckpointDone = new AtomicBoolean(false);
OperatorCoordinatorCheckpointContext coordinatorCheckpointContext =
        new CheckpointCoordinatorTestingUtils
                        .MockOperatorCheckpointCoordinatorContextBuilder()
                .setOnCallingCheckpointCoordinator(
                        (checkpointId, result) -> {
                            coordCheckpointDone.set(true);
                            result.complete(new byte[0]);
                        })
                .setOperatorID(opID1)
                .build();

CheckpointCoordinator checkpointCoordinator =
        new CheckpointCoordinatorBuilder()
                .setCheckpointCoordinatorConfiguration(
                        CheckpointCoordinatorConfiguration.builder()
                                .setMaxConcurrentCheckpoints(Integer.MAX_VALUE)
                                .build())
                .setTimer(manuallyTriggeredScheduledExecutor)
                .setCoordinatorsToCheckpoint(
                        Collections.singleton(coordinatorCheckpointContext))
                .build(graph);

AtomicReference<Long> checkpointIdRef = new AtomicReference<>();

// Master hook simulating an externally induced source: it acknowledges both
// tasks while the master checkpoint is still in flight.
checkpointCoordinator.addMasterHook(
        new MasterTriggerRestoreHook<Integer>() {
            @Override
            public String getIdentifier() {
                return "anything";
            }

            @Override
            @Nullable
            public CompletableFuture<Integer> triggerCheckpoint(
                    long checkpointId, long timestamp, Executor executor)
                    throws Exception {
                // Operator coordinator checkpoints must complete before
                // the master hooks are triggered.
                assertThat(coordCheckpointDone.get())
                        .as("The coordinator checkpoint should have finished.")
                        .isTrue();
                checkpointIdRef.set(checkpointId);
                AcknowledgeCheckpoint acknowledgeCheckpoint1 =
                        new AcknowledgeCheckpoint(
                                graph.getJobID(),
                                attemptID1,
                                checkpointId,
                                new CheckpointMetrics(),
                                taskOperatorSubtaskStates1);
                AcknowledgeCheckpoint acknowledgeCheckpoint2 =
                        new AcknowledgeCheckpoint(
                                graph.getJobID(),
                                attemptID2,
                                checkpointId,
                                new CheckpointMetrics(),
                                taskOperatorSubtaskStates2);
                checkpointCoordinator.receiveAcknowledgeMessage(
                        acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO);
                checkpointCoordinator.receiveAcknowledgeMessage(
                        acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO);
                return null;
            }

            @Override
            public void
restoreCheckpoint(long checkpointId, Integer checkpointData) {} @Override public SimpleVersionedSerializer<Integer> createCheckpointDataSerializer() { return new SimpleVersionedSerializer<Integer>() { @Override public int getVersion() { return 0; } @Override public byte[] serialize(Integer obj) { return new byte[0]; } @Override public Integer deserialize(int version, byte[] serialized) { return 1; } }; } }); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isZero(); assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks().size()).isZero(); final CompletableFuture<CompletedCheckpoint> checkpointFuture = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); FutureUtils.throwIfCompletedExceptionally(checkpointFuture); assertThat(checkpointCoordinator.getNumberOfRetainedSuccessfulCheckpoints()).isOne(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isZero(); assertThat(manuallyTriggeredScheduledExecutor.getActiveScheduledTasks()).isEmpty(); long checkpointId = checkpointIdRef.get(); for (ExecutionVertex vertex : Arrays.asList(vertex1, vertex2)) { ExecutionAttemptID attemptId = vertex.getCurrentExecutionAttempt().getAttemptId(); assertThat(gateway.getOnlyTriggeredCheckpoint(attemptId).checkpointId) .isEqualTo(checkpointId); } CompletedCheckpoint success = checkpointCoordinator.getSuccessfulCheckpoints().get(0); assertThat(success.getJobId()).isEqualTo(graph.getJobID()); assertThat(success.getOperatorStates().size()).isEqualTo(2); checkpointCoordinator.shutdown(); } @Test void testCompleteCheckpointFailureWithExternallyInducedSource() throws Exception { JobVertexID jobVertexID1 = new JobVertexID(); JobVertexID jobVertexID2 = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID1) .addJobVertex(jobVertexID2) 
.build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex vertex1 = graph.getJobVertex(jobVertexID1).getTaskVertices()[0]; ExecutionVertex vertex2 = graph.getJobVertex(jobVertexID2).getTaskVertices()[0]; ExecutionAttemptID attemptID1 = vertex1.getCurrentExecutionAttempt().getAttemptId(); ExecutionAttemptID attemptID2 = vertex2.getCurrentExecutionAttempt().getAttemptId(); OperatorID opID1 = vertex1.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); OperatorID opID2 = vertex2.getJobVertex().getOperatorIDs().get(0).getGeneratedOperatorID(); TaskStateSnapshot taskOperatorSubtaskStates1 = new TaskStateSnapshot(); TaskStateSnapshot taskOperatorSubtaskStates2 = new TaskStateSnapshot(); OperatorSubtaskState subtaskState1 = OperatorSubtaskState.builder().build(); OperatorSubtaskState subtaskState2 = OperatorSubtaskState.builder().build(); taskOperatorSubtaskStates1.putSubtaskStateByOperatorID(opID1, subtaskState1); taskOperatorSubtaskStates2.putSubtaskStateByOperatorID(opID2, subtaskState2); AtomicBoolean coordCheckpointDone = new AtomicBoolean(false); OperatorCoordinatorCheckpointContext coordinatorCheckpointContext = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOnCallingCheckpointCoordinator( (checkpointId, result) -> { coordCheckpointDone.set(true); result.complete(new byte[0]); }) .setOperatorID(opID1) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint( Collections.singleton(coordinatorCheckpointContext)) .setCheckpointStorage( new JobManagerCheckpointStorage() { private static final long serialVersionUID = 8134582566514272546L; @Override public CheckpointStorageAccess createCheckpointStorage( JobID jobId) throws IOException { return new 
MemoryBackendCheckpointStorageAccess( jobId, null, null, 100) { @Override public CheckpointStorageLocation initializeLocationForCheckpoint( long checkpointId) throws IOException { return new NonPersistentMetadataCheckpointStorageLocation( 1000) { @Override public CheckpointMetadataOutputStream createMetadataOutputStream() throws IOException { throw new IOException( "Artificial Exception"); } }; } }; } }) .build(graph); AtomicReference<Long> checkpointIdRef = new AtomicReference<>(); checkpointCoordinator.addMasterHook( new MasterTriggerRestoreHook<Integer>() { @Override public String getIdentifier() { return "anything"; } @Override @Nullable public CompletableFuture<Integer> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) throws Exception { assertThat(coordCheckpointDone.get()) .as("The coordinator checkpoint should have finished.") .isTrue(); checkpointIdRef.set(checkpointId); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID1, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates1); AcknowledgeCheckpoint acknowledgeCheckpoint2 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID2, checkpointId, new CheckpointMetrics(), taskOperatorSubtaskStates2); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint1, TASK_MANAGER_LOCATION_INFO); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint2, TASK_MANAGER_LOCATION_INFO); return null; } @Override public void restoreCheckpoint(long checkpointId, Integer checkpointData) throws Exception {} @Override public SimpleVersionedSerializer<Integer> createCheckpointDataSerializer() { return new SimpleVersionedSerializer<Integer>() { @Override public int getVersion() { return 0; } @Override public byte[] serialize(Integer obj) { return new byte[0]; } @Override public Integer deserialize(int version, byte[] serialized) { return 1; } }; } }); final CompletableFuture<CompletedCheckpoint> checkpointFuture = 
checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointFuture).isCompletedExceptionally(); assertThat(checkpointCoordinator.getSuccessfulCheckpoints()).isEmpty(); } @Test void testResetCalledInRegionRecovery() throws Exception { CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .build(EXECUTOR_RESOURCE.getExecutor()); TestResetHook hook = new TestResetHook("id"); checkpointCoordinator.addMasterHook(hook); assertThat(hook.resetCalled).isFalse(); checkpointCoordinator.restoreLatestCheckpointedStateToSubtasks(Collections.emptySet()); assertThat(hook.resetCalled).isTrue(); } @Test void testNotifyCheckpointAbortionInOperatorCoordinator() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex executionVertex = graph.getJobVertex(jobVertexID).getTaskVertices()[0]; ExecutionAttemptID attemptID = executionVertex.getCurrentExecutionAttempt().getAttemptId(); CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOperatorID(new OperatorID()) .setOnCallingCheckpointCoordinator( (ignored, future) -> future.complete(new byte[0])) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setMaxConcurrentCheckpoints(Integer.MAX_VALUE) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint(Collections.singleton(context)) .build(graph); try { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId1 = 
Collections.max(checkpointCoordinator.getPendingCheckpoints().keySet()); checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long checkpointId2 = Collections.max(checkpointCoordinator.getPendingCheckpoints().keySet()); AcknowledgeCheckpoint acknowledgeCheckpoint1 = new AcknowledgeCheckpoint( graph.getJobID(), attemptID, checkpointId2, new CheckpointMetrics(), null); checkpointCoordinator.receiveAcknowledgeMessage(acknowledgeCheckpoint1, ""); assertThat(context.getAbortedCheckpoints()) .isEqualTo(Collections.singletonList(checkpointId1)); assertThat(context.getCompletedCheckpoints()) .isEqualTo(Collections.singletonList(checkpointId2)); } finally { checkpointCoordinator.shutdown(); } } @Test void testTimeoutWhileCheckpointOperatorCoordinatorNotFinishing() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOperatorID(new OperatorID()) .setOnCallingCheckpointCoordinator( (ignored, future) -> { }) .build(); ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointTimeout(10) .build()) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint(Collections.singleton(context)) .build(graph); try { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointCoordinator.isTriggering()).isTrue(); manuallyTriggeredScheduledExecutor.triggerNonPeriodicScheduledTasks(); 
manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointCoordinator.isTriggering()).isFalse(); } finally { checkpointCoordinator.shutdown(); executorService.shutdownNow(); } } @Test void testAbortingBeforeTriggeringCheckpointOperatorCoordinator() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .build(EXECUTOR_RESOURCE.getExecutor()); String trigger = "Trigger"; String abort = "Abort"; final List<String> notificationSequence = new ArrayList<>(); CheckpointCoordinatorTestingUtils.MockOperatorCoordinatorCheckpointContext context = new CheckpointCoordinatorTestingUtils .MockOperatorCheckpointCoordinatorContextBuilder() .setOperatorID(new OperatorID()) .setOnCallingCheckpointCoordinator( (id, future) -> { notificationSequence.add(trigger + id); future.complete(new byte[0]); }) .setOnCallingAbortCurrentTriggering(() -> notificationSequence.add(abort)) .build(); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointTimeout(10) .build()) .setIoExecutor(manuallyTriggeredScheduledExecutor) .setTimer(manuallyTriggeredScheduledExecutor) .setCoordinatorsToCheckpoint(Collections.singleton(context)) .build(graph); try { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.trigger(); manuallyTriggeredScheduledExecutor.trigger(); manuallyTriggeredScheduledExecutor.trigger(); declineCheckpoint(1L, checkpointCoordinator, jobVertexID, graph); manuallyTriggeredScheduledExecutor.triggerAll(); checkState(!checkpointCoordinator.isTriggering()); checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat( !notificationSequence.contains(trigger + "1") || notificationSequence.indexOf(trigger + "1") < notificationSequence.indexOf(abort)) 
.isTrue(); } finally { checkpointCoordinator.shutdown(); } } @Test void testReportLatestCompletedCheckpointIdWithAbort() throws Exception { JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); ExecutionVertex task = graph.getJobVertex(jobVertexID).getTaskVertices()[0]; AtomicLong reportedCheckpointId = new AtomicLong(-1); LogicalSlot slot = new TestingLogicalSlotBuilder() .setTaskManagerGateway( new SimpleAckingTaskManagerGateway() { @Override public void notifyCheckpointAborted( ExecutionAttemptID executionAttemptID, JobID jobId, long checkpointId, long latestCompletedCheckpointId, long timestamp) { reportedCheckpointId.set(latestCompletedCheckpointId); } }) .createTestingLogicalSlot(); ExecutionGraphTestUtils.setVertexResource(task, slot); task.getCurrentExecutionAttempt().transitionState(ExecutionState.RUNNING); CheckpointCoordinator checkpointCoordinator = new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setAllowCheckpointsAfterTasksFinished(true) .build(graph); CompletableFuture<CompletedCheckpoint> result = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long completedCheckpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); checkpointCoordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), task.getCurrentExecutionAttempt().getAttemptId(), completedCheckpointId, new CheckpointMetrics(), new TaskStateSnapshot()), "localhost"); assertThat(result).isDone(); assertThat(result).isNotCompletedExceptionally(); result = checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); long abortedCheckpointId = checkpointCoordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); 
checkpointCoordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), task.getCurrentExecutionAttempt().getAttemptId(), abortedCheckpointId, new CheckpointException(CHECKPOINT_EXPIRED)), "localhost"); assertThat(result).isCompletedExceptionally(); assertThat(reportedCheckpointId).hasValue(completedCheckpointId); } @Test void testBaseLocationsNotInitialized() throws Exception { File checkpointDir = TempDirUtils.newFolder(tmpFolder); JobVertexID jobVertexID = new JobVertexID(); ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(jobVertexID) .setTransitToRunning(false) .build(EXECUTOR_RESOURCE.getExecutor()); new CheckpointCoordinatorBuilder() .setCheckpointCoordinatorConfiguration( CheckpointCoordinatorConfiguration.builder() .setCheckpointInterval(Long.MAX_VALUE) .build()) .setCheckpointStorage(new FileSystemCheckpointStorage(checkpointDir.toURI())) .build(graph); Path jobCheckpointPath = new Path(checkpointDir.getAbsolutePath(), graph.getJobID().toString()); FileSystem fs = FileSystem.get(checkpointDir.toURI()); assertThat(fs.exists(jobCheckpointPath)).isFalse(); } private CheckpointCoordinator getCheckpointCoordinator( ExecutionGraph graph, CheckpointFailureManager failureManager) throws Exception { return new CheckpointCoordinatorBuilder() .setTimer(manuallyTriggeredScheduledExecutor) .setFailureManager(failureManager) .build(graph); } private CheckpointCoordinator getCheckpointCoordinator(ScheduledExecutor timer) throws Exception { ExecutionGraph graph = new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder() .addJobVertex(new JobVertexID()) .addJobVertex(new JobVertexID()) .build(EXECUTOR_RESOURCE.getExecutor()); return new CheckpointCoordinatorBuilder().setTimer(timer).build(graph); } private CheckpointFailureManager getCheckpointFailureManager(String errorMsg) { return new CheckpointFailureManager( 0, new CheckpointFailureManager.FailJobCallback() { @Override public void 
failJob(Throwable cause) { throw new RuntimeException(errorMsg); } @Override public void failJobDueToTaskFailure( Throwable cause, ExecutionAttemptID failingTask) { throw new RuntimeException(errorMsg); } }); } private PendingCheckpoint declineSynchronousSavepoint( final JobID jobId, final CheckpointCoordinator coordinator, final ExecutionAttemptID attemptID, final Throwable reason) { final long checkpointId = coordinator.getPendingCheckpoints().entrySet().iterator().next().getKey(); final PendingCheckpoint checkpoint = coordinator.getPendingCheckpoints().get(checkpointId); coordinator.receiveDeclineMessage( new DeclineCheckpoint( jobId, attemptID, checkpointId, new CheckpointException(CHECKPOINT_DECLINED, reason)), TASK_MANAGER_LOCATION_INFO); return checkpoint; } private void performIncrementalCheckpoint( JobID jobId, CheckpointCoordinator checkpointCoordinator, ExecutionJobVertex jobVertex1, List<KeyGroupRange> keyGroupPartitions1, int cpSequenceNumber) throws Exception { checkpointCoordinator.triggerCheckpoint(false); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointCoordinator.getPendingCheckpoints()).hasSize(1); long checkpointId = Iterables.getOnlyElement(checkpointCoordinator.getPendingCheckpoints().keySet()); for (int index = 0; index < jobVertex1.getParallelism(); index++) { KeyGroupRange keyGroupRange = keyGroupPartitions1.get(index); Map<StateHandleID, StreamStateHandle> privateState = new HashMap<>(); privateState.put( new StateHandleID("private-1"), spy(new ByteStreamStateHandle("private-1", new byte[] {'p'}))); Map<StateHandleID, StreamStateHandle> sharedState = new HashMap<>(); if (cpSequenceNumber > 0) { sharedState.put( new StateHandleID("shared-" + (cpSequenceNumber - 1)), spy(new PlaceholderStreamStateHandle(1L))); } sharedState.put( new StateHandleID("shared-" + cpSequenceNumber), spy( new ByteStreamStateHandle( "shared-" + cpSequenceNumber + "-" + keyGroupRange, new byte[] {'s'}))); IncrementalRemoteKeyedStateHandle 
managedState = spy( new IncrementalRemoteKeyedStateHandle( new UUID(42L, 42L), keyGroupRange, checkpointId, sharedState, privateState, spy(new ByteStreamStateHandle("meta", new byte[] {'m'})))); OperatorSubtaskState operatorSubtaskState = spy(OperatorSubtaskState.builder().setManagedKeyedState(managedState).build()); Map<OperatorID, OperatorSubtaskState> opStates = new HashMap<>(); opStates.put( jobVertex1.getOperatorIDs().get(0).getGeneratedOperatorID(), operatorSubtaskState); TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(opStates); AcknowledgeCheckpoint acknowledgeCheckpoint = new AcknowledgeCheckpoint( jobId, jobVertex1 .getTaskVertices()[index] .getCurrentExecutionAttempt() .getAttemptId(), checkpointId, new CheckpointMetrics(), taskStateSnapshot); checkpointCoordinator.receiveAcknowledgeMessage( acknowledgeCheckpoint, TASK_MANAGER_LOCATION_INFO); } } private static class IOExceptionCheckpointIDCounter extends CheckpointIDCounterWithOwner { @Override public long getAndIncrement() throws Exception { checkNotNull(owner); throw new IOException("disk is error!"); } } private static class IOExceptionCheckpointStorage extends JobManagerCheckpointStorage { @Override public CheckpointStorageAccess createCheckpointStorage(JobID jobId) throws IOException { return new MemoryBackendCheckpointStorageAccess(jobId, null, null, 100) { @Override public CheckpointStorageLocation initializeLocationForCheckpoint(long checkpointId) throws IOException { throw new IOException("disk is error!"); } }; } } private static class StoppingCheckpointIDCounter extends CheckpointIDCounterWithOwner { @Override public long getAndIncrement() throws Exception { checkNotNull(owner); owner.stopCheckpointScheduler(); return super.getAndIncrement(); } } private static class CheckpointIDCounterWithOwner extends StandaloneCheckpointIDCounter { protected CheckpointCoordinator owner; void setOwner(CheckpointCoordinator coordinator) { this.owner = checkNotNull(coordinator); } } private static 
class TestFailJobCallback implements CheckpointFailureManager.FailJobCallback { private int invokeCounter = 0; @Override public void failJob(Throwable cause) { invokeCounter++; } @Override public void failJobDueToTaskFailure( final Throwable cause, final ExecutionAttemptID executionAttemptID) { invokeCounter++; } public int getInvokeCounter() { return invokeCounter; } } private static class TestResetHook implements MasterTriggerRestoreHook<String> { private final String id; boolean resetCalled; TestResetHook(String id) { this.id = id; this.resetCalled = false; } @Override public String getIdentifier() { return id; } @Override public void reset() throws Exception { resetCalled = true; } @Override public CompletableFuture<String> triggerCheckpoint( long checkpointId, long timestamp, Executor executor) { throw new UnsupportedOperationException(); } @Override public void restoreCheckpoint(long checkpointId, @Nullable String checkpointData) throws Exception { throw new UnsupportedOperationException(); } @Override public SimpleVersionedSerializer<String> createCheckpointDataSerializer() { throw new UnsupportedOperationException(); } } private static void verifyDiscard( List<Map<StateHandleID, StreamStateHandle>> sharedHandlesByCheckpoint1, Function<Integer, VerificationMode> checkpointVerify) throws Exception { for (Map<StateHandleID, StreamStateHandle> cpList : sharedHandlesByCheckpoint1) { for (Map.Entry<StateHandleID, StreamStateHandle> entry : cpList.entrySet()) { String key = entry.getKey().getKeyString(); int checkpointID = Integer.parseInt(String.valueOf(key.charAt(key.length() - 1))); VerificationMode verificationMode = checkpointVerify.apply(checkpointID); verify(entry.getValue(), verificationMode).discardState(); } } } private TestingStreamStateHandle handle() { return new TestingStreamStateHandle(); } private void declineCheckpoint( long checkpointId, CheckpointCoordinator coordinator, JobVertexID nackVertexID, ExecutionGraph graph) { 
coordinator.receiveDeclineMessage( new DeclineCheckpoint( graph.getJobID(), graph.getJobVertex(nackVertexID) .getTaskVertices()[0] .getCurrentExecutionAttempt() .getAttemptId(), checkpointId, new CheckpointException(CHECKPOINT_DECLINED)), "test"); } private void ackCheckpoint( long checkpointId, CheckpointCoordinator coordinator, JobVertexID ackVertexID, ExecutionGraph graph, TestingStreamStateHandle metaState, TestingStreamStateHandle privateState, TestingStreamStateHandle sharedState) throws CheckpointException { Map<StateHandleID, StreamStateHandle> sharedStateMap = new HashMap<>(singletonMap(new StateHandleID("shared-state-key"), sharedState)); Map<StateHandleID, StreamStateHandle> privateStateMap = new HashMap<>(singletonMap(new StateHandleID("private-state-key"), privateState)); ExecutionJobVertex jobVertex = graph.getJobVertex(ackVertexID); OperatorID operatorID = jobVertex.getOperatorIDs().get(0).getGeneratedOperatorID(); coordinator.receiveAcknowledgeMessage( new AcknowledgeCheckpoint( graph.getJobID(), jobVertex.getTaskVertices()[0].getCurrentExecutionAttempt().getAttemptId(), checkpointId, new CheckpointMetrics(), new TaskStateSnapshot( singletonMap( operatorID, OperatorSubtaskState.builder() .setManagedKeyedState( new IncrementalRemoteKeyedStateHandle( UUID.randomUUID(), KeyGroupRange.of(0, 9), checkpointId, sharedStateMap, privateStateMap, metaState)) .build()))), "test"); } }
Also, this error will be given to the beginning of the function. Check and see if you can give the error to the correct position of the relevant field in the annotation.
public void process(FunctionNode functionNode, List<AnnotationAttachmentNode> annotations) { if (!enabled) { return; } BLangPackage parent = (BLangPackage) ((BLangFunction) functionNode).parent; String packageName = getPackageName(parent); TestSuite suite = registry.getTestSuites().get(packageName); if (suite == null) { suite = registry.getTestSuites().computeIfAbsent(packageName, func -> new TestSuite(parent.packageID.name.value, packageName, parent.packageID.orgName.value, parent.packageID.version.value)); } annotations = annotations.stream().distinct().collect(Collectors.toList()); for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); String functionName = functionNode.getName().getValue(); if (BEFORE_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeSuiteFunction(functionName); } else if (AFTER_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterSuiteFunction(functionName); } else if (BEFORE_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeEachFunction(functionName); } else if (AFTER_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterEachFunction(functionName); } else if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String[] vals = new String[2]; vals[0] = packageName; if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<RecordLiteralNode.RecordField> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getFields(); attributes.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); } else { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; name = varNameField.variableName.value; valueExpr = varNameField; } String value = 
valueExpr.toString(); if (MODULE.equals(name)) { vals[0] = value; } else if (FUNCTION.equals(name)) { vals[1] = value; } }); if (bLangPackage == null) { setBlangPackage(attachmentNode); } if (vals[0].isEmpty()) { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, ((BLangFunction) functionNode).pos, "Module name cannot be empty"); } if (vals[1].isEmpty()) { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, ((BLangFunction) functionNode).pos, "Function name cannot be empty"); } SymbolTable symbolTable = SymbolTable.getInstance(compilerContext); Map<BPackageSymbol, SymbolEnv> packageEnvironmentMap = symbolTable.pkgEnvMap; PackageID functionToMockID = getPackageID(vals[0], packageEnvironmentMap); BType functionToMockType = getFunctionType(packageEnvironmentMap, functionToMockID, vals[1]); BType mockFunctionType = getFunctionType(packageEnvironmentMap, bLangPackage.packageID, ((BLangFunction) functionNode).name.toString()); if (functionToMockType != null && mockFunctionType != null) { if (!typeChecker.isAssignable(mockFunctionType, functionToMockType)) { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, ((BLangFunction) functionNode).pos, "Function parameters and Mock function parameters do not match. 
Expected " + functionToMockType.toString()); } } else { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, ((BLangFunction) functionNode).pos, "Module " + vals[0] + " not found"); } BLangTestablePackage bLangTestablePackage = (BLangTestablePackage) ((BLangFunction) functionNode).parent; bLangTestablePackage.addMockFunction(vals[0] + MOCK_ANNOTATION_DELIMITER + vals[1], functionName); } } else if (TEST_ANNOTATION_NAME.equals(annotationName)) { Test test = new Test(); test.setTestName(functionName); AtomicBoolean shouldSkip = new AtomicBoolean(); AtomicBoolean groupsFound = new AtomicBoolean(); List<String> groups = registry.getGroups(); boolean shouldIncludeGroups = registry.shouldIncludeGroups(); if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<RecordLiteralNode.RecordField> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getFields(); attributes.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); } else { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; name = varNameField.variableName.value; valueExpr = varNameField; } if (TEST_ENABLE_ANNOTATION_NAME.equals(name) && "false".equals(valueExpr .toString())) { shouldSkip.set(true); return; } if (GROUP_ANNOTATION_NAME.equals(name)) { if (valueExpr instanceof BLangListConstructorExpr) { BLangListConstructorExpr values = (BLangListConstructorExpr) valueExpr; test.setGroups(values.exprs.stream().map(node -> node.toString()) .collect(Collectors.toList())); if (groups != null && !groups.isEmpty()) { boolean isGroupPresent = isGroupAvailable(groups, test.getGroups()); if (shouldIncludeGroups) { if (!isGroupPresent) { shouldSkip.set(true); return; } } else { if (isGroupPresent) { 
shouldSkip.set(true); return; } } groupsFound.set(true); } } } if (VALUE_SET_ANNOTATION_NAME.equals(name)) { test.setDataProvider(valueExpr.toString()); } if (BEFORE_FUNCTION.equals(name)) { test.setBeforeTestFunction(valueExpr.toString()); } if (AFTER_FUNCTION.equals(name)) { test.setAfterTestFunction(valueExpr.toString()); } if (DEPENDS_ON_FUNCTIONS.equals(name)) { if (valueExpr instanceof BLangListConstructorExpr) { BLangListConstructorExpr values = (BLangListConstructorExpr) valueExpr; values.exprs.stream().map(node -> node.toString()).forEach (test::addDependsOnTestFunction); } } }); } if (groups != null && !groups.isEmpty() && !groupsFound.get() && shouldIncludeGroups) { shouldSkip.set(true); } if (!shouldSkip.get()) { suite.addTests(test); } } else { } } }
"Module name cannot be empty");
public void process(FunctionNode functionNode, List<AnnotationAttachmentNode> annotations) { if (!enabled) { return; } parent = (BLangPackage) ((BLangFunction) functionNode).parent; String packageName = getPackageName(parent); TestSuite suite = registry.getTestSuites().get(packageName); if (suite == null) { suite = registry.getTestSuites().computeIfAbsent(packageName, func -> new TestSuite(parent.packageID.name.value, packageName, parent.packageID.orgName.value, parent.packageID.version.value)); } annotations = annotations.stream().distinct().collect(Collectors.toList()); for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); String functionName = functionNode.getName().getValue(); if (BEFORE_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeSuiteFunction(functionName); } else if (AFTER_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterSuiteFunction(functionName); } else if (BEFORE_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeEachFunction(functionName); } else if (AFTER_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterEachFunction(functionName); } else if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String[] vals = new String[2]; vals[0] = packageName; if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<RecordLiteralNode.RecordField> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getFields(); attributes.forEach(field -> { String name; BLangExpression valueExpr; if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); } else { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; name = varNameField.variableName.value; valueExpr = varNameField; } String value = 
valueExpr.toString(); if (MODULE.equals(name)) { value = formatPackageName(value); vals[0] = value; } else if (FUNCTION.equals(name)) { vals[1] = value; } }); if (vals[1].isEmpty()) { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, attachmentNode.getPosition(), "function name cannot be empty"); break; } PackageID functionToMockID = getPackageID(vals[0]); if (functionToMockID == null) { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, attachmentNode.getPosition(), "could not find module specified "); } BType functionToMockType = getFunctionType(packageEnvironmentMap, functionToMockID, vals[1]); BType mockFunctionType = getFunctionType(packageEnvironmentMap, parent.packageID, ((BLangFunction) functionNode).name.toString()); if (functionToMockType != null && mockFunctionType != null) { if (!typeChecker.isAssignable(mockFunctionType, functionToMockType)) { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, ((BLangFunction) functionNode).pos, "incompatible types: expected " + functionToMockType.toString() + " but found " + mockFunctionType.toString()); } } else { diagnosticLog.logDiagnostic(Diagnostic.Kind.ERROR, attachmentNode.getPosition(), "could not find functions in module"); } BLangTestablePackage bLangTestablePackage = (BLangTestablePackage) ((BLangFunction) functionNode).parent; bLangTestablePackage.addMockFunction(vals[0] + MOCK_ANNOTATION_DELIMITER + vals[1], functionName); } } else if (TEST_ANNOTATION_NAME.equals(annotationName)) { Test test = new Test(); test.setTestName(functionName); AtomicBoolean shouldSkip = new AtomicBoolean(); AtomicBoolean groupsFound = new AtomicBoolean(); List<String> groups = registry.getGroups(); boolean shouldIncludeGroups = registry.shouldIncludeGroups(); if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<RecordLiteralNode.RecordField> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getFields(); attributes.forEach(field -> { String name; BLangExpression valueExpr; if 
(field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField attributeNode = (BLangRecordLiteral.BLangRecordKeyValueField) field; name = attributeNode.getKey().toString(); valueExpr = attributeNode.getValue(); } else { BLangRecordLiteral.BLangRecordVarNameField varNameField = (BLangRecordLiteral.BLangRecordVarNameField) field; name = varNameField.variableName.value; valueExpr = varNameField; } if (TEST_ENABLE_ANNOTATION_NAME.equals(name) && "false".equals(valueExpr .toString())) { shouldSkip.set(true); return; } if (GROUP_ANNOTATION_NAME.equals(name)) { if (valueExpr instanceof BLangListConstructorExpr) { BLangListConstructorExpr values = (BLangListConstructorExpr) valueExpr; test.setGroups(values.exprs.stream().map(node -> node.toString()) .collect(Collectors.toList())); if (groups != null && !groups.isEmpty()) { boolean isGroupPresent = isGroupAvailable(groups, test.getGroups()); if (shouldIncludeGroups) { if (!isGroupPresent) { shouldSkip.set(true); return; } } else { if (isGroupPresent) { shouldSkip.set(true); return; } } groupsFound.set(true); } } } if (VALUE_SET_ANNOTATION_NAME.equals(name)) { test.setDataProvider(valueExpr.toString()); } if (BEFORE_FUNCTION.equals(name)) { test.setBeforeTestFunction(valueExpr.toString()); } if (AFTER_FUNCTION.equals(name)) { test.setAfterTestFunction(valueExpr.toString()); } if (DEPENDS_ON_FUNCTIONS.equals(name)) { if (valueExpr instanceof BLangListConstructorExpr) { BLangListConstructorExpr values = (BLangListConstructorExpr) valueExpr; values.exprs.stream().map(node -> node.toString()).forEach (test::addDependsOnTestFunction); } } }); } if (groups != null && !groups.isEmpty() && !groupsFound.get() && shouldIncludeGroups) { shouldSkip.set(true); } if (!shouldSkip.get()) { suite.addTests(test); } } else { } } }
class TestAnnotationProcessor extends AbstractCompilerPlugin { private static final String TEST_ANNOTATION_NAME = "Config"; private static final String BEFORE_SUITE_ANNOTATION_NAME = "BeforeSuite"; private static final String AFTER_SUITE_ANNOTATION_NAME = "AfterSuite"; private static final String BEFORE_EACH_ANNOTATION_NAME = "BeforeEach"; private static final String AFTER_EACH_ANNOTATION_NAME = "AfterEach"; private static final String MOCK_ANNOTATION_NAME = "Mock"; private static final String BEFORE_FUNCTION = "before"; private static final String AFTER_FUNCTION = "after"; private static final String DEPENDS_ON_FUNCTIONS = "dependsOn"; private static final String MODULE = "moduleName"; private static final String FUNCTION = "functionName"; private static final String GROUP_ANNOTATION_NAME = "groups"; private static final String VALUE_SET_ANNOTATION_NAME = "dataProvider"; private static final String TEST_ENABLE_ANNOTATION_NAME = "enable"; private static final String MOCK_ANNOTATION_DELIMITER = " private TesterinaRegistry registry = TesterinaRegistry.getInstance(); private boolean enabled = true; private CompilerContext compilerContext; private DiagnosticLog diagnosticLog; private Types typeChecker; private SymbolResolver symbolResolver; private BLangPackage bLangPackage; /** * this property is used as a work-around to initialize test suites only once for a package as Compiler * Annotation currently emits package import events too to the process method. 
*/ @Override public void init(DiagnosticLog diagnosticLog) { this.diagnosticLog = diagnosticLog; this.typeChecker = Types.getInstance(compilerContext); this.symbolResolver = SymbolResolver.getInstance(compilerContext); if (TesterinaRegistry.getInstance().isTestSuitesCompiled()) { enabled = false; } } @Override public void setCompilerContext(CompilerContext context) { this.compilerContext = context; } private void setBlangPackage(AnnotationAttachmentNode attachmentNode) { PackageLoader packageLoader = PackageLoader.getInstance(this.compilerContext); Diagnostic.DiagnosticSource source = attachmentNode.getExpression().getPosition().getSource(); this.bLangPackage = packageLoader.loadPackage(((BDiagnosticSource) source).pkgID); } @Override /** * Get the function type by iterating through the packageEnvironmentMap. * @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv * @param packageID Fully qualified package ID of the respective function * @param functionName Name of the function * @return Function type if found, null if not found */ private BType getFunctionType(Map<BPackageSymbol, SymbolEnv> pkgEnvMap, PackageID packageID, String functionName) { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : pkgEnvMap.entrySet()) { if (entry.getKey().pkgID.equals(packageID)) { BSymbol symbol = symbolResolver.lookupSymbolInMainSpace(entry.getValue(), new Name(functionName)); if (!symbol.getType().toString().equals("other")) { return symbol.getType(); } } } return null; } /** * Returns a PackageID for the passed moduleName. 
* @param moduleName Module name passed via function annotation * @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv * @return Module packageID */ private PackageID getPackageID(String moduleName, Map<BPackageSymbol, SymbolEnv> pkgEnvMap) { if (moduleName.equals(".")) { return bLangPackage.packageID; } else { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : pkgEnvMap.entrySet()) { String packageModule = entry.getKey().pkgID.toString(); if (packageModule.equals(moduleName)) { return entry.getKey().pkgID; } else { if (packageModule.contains("/") && packageModule.contains(":")) { if (moduleName.equals(packageModule.substring(packageModule.indexOf('/') + 1, packageModule.indexOf(':')))) { return entry.getKey().pkgID; } } } } } return null; } /** * Check whether there is a common element in two Lists. * * @param inputGroups String @{@link List} to match * @param functionGroups String @{@link List} to match agains * @return true if a match is found */ private boolean isGroupAvailable(List<String> inputGroups, List<String> functionGroups) { for (String group : inputGroups) { for (String funcGroup : functionGroups) { if (group.equals(funcGroup)) { return true; } } } return false; } private String getPackageName(PackageNode packageNode) { BLangPackage bLangPackage = ((BLangPackage) packageNode); return bLangPackage.packageID.toString(); } /*private static int getTestInstructionsPosition(PackageInfo packageInfo) { FunctionInfo testInitFunctionInfo = packageInfo.getTestInitFunctionInfo(); if (testInitFunctionInfo != null) { return testInitFunctionInfo.getDefaultWorkerInfo().getCodeAttributeInfo().getCodeAddrs(); } return packageInfo.getInstructions().length; }*/ }
class TestAnnotationProcessor extends AbstractCompilerPlugin { private static final String TEST_ANNOTATION_NAME = "Config"; private static final String BEFORE_SUITE_ANNOTATION_NAME = "BeforeSuite"; private static final String AFTER_SUITE_ANNOTATION_NAME = "AfterSuite"; private static final String BEFORE_EACH_ANNOTATION_NAME = "BeforeEach"; private static final String AFTER_EACH_ANNOTATION_NAME = "AfterEach"; private static final String MOCK_ANNOTATION_NAME = "Mock"; private static final String BEFORE_FUNCTION = "before"; private static final String AFTER_FUNCTION = "after"; private static final String DEPENDS_ON_FUNCTIONS = "dependsOn"; private static final String MODULE = "moduleName"; private static final String FUNCTION = "functionName"; private static final String GROUP_ANNOTATION_NAME = "groups"; private static final String VALUE_SET_ANNOTATION_NAME = "dataProvider"; private static final String TEST_ENABLE_ANNOTATION_NAME = "enable"; private static final String MOCK_ANNOTATION_DELIMITER = " private TesterinaRegistry registry = TesterinaRegistry.getInstance(); private boolean enabled = true; private CompilerContext compilerContext; private DiagnosticLog diagnosticLog; private Types typeChecker; private SymbolResolver symbolResolver; private BLangPackage parent; private PackageCache packageCache; private Map<BPackageSymbol, SymbolEnv> packageEnvironmentMap; /** * this property is used as a work-around to initialize test suites only once for a package as Compiler * Annotation currently emits package import events too to the process method. 
*/ @Override public void init(DiagnosticLog diagnosticLog) { this.diagnosticLog = diagnosticLog; this.typeChecker = Types.getInstance(compilerContext); this.symbolResolver = SymbolResolver.getInstance(compilerContext); this.packageEnvironmentMap = SymbolTable.getInstance(compilerContext).pkgEnvMap; this.packageCache = PackageCache.getInstance(compilerContext); if (TesterinaRegistry.getInstance().isTestSuitesCompiled()) { enabled = false; } } @Override public void setCompilerContext(CompilerContext context) { this.compilerContext = context; } @Override /** * Get the function type by iterating through the packageEnvironmentMap. * @param pkgEnvMap map of BPackageSymbol and its respective SymbolEnv * @param packageID Fully qualified package ID of the respective function * @param functionName Name of the function * @return Function type if found, null if not found */ private BType getFunctionType(Map<BPackageSymbol, SymbolEnv> pkgEnvMap, PackageID packageID, String functionName) { for (Map.Entry<BPackageSymbol, SymbolEnv> entry : pkgEnvMap.entrySet()) { if (entry.getKey().pkgID.equals(packageID)) { BSymbol symbol = symbolResolver.lookupSymbolInMainSpace(entry.getValue(), new Name(functionName)); if (!symbol.getType().toString().equals("other")) { return symbol.getType(); } } } return null; } /** * Returns a PackageID for the passed moduleName. * @param moduleName Module name passed via function annotation * @return Module packageID */ private PackageID getPackageID(String moduleName) { if (packageCache.getSymbol(moduleName) != null) { return packageCache.getSymbol(moduleName).pkgID; } else { return null; } } /** * Formats the package name obtained from the mock annotation. * Checks for empty, '.', or single module names and replaces them. 
* Ballerina modules and fully qualified packages are simply returned * @param value * @return */ private String formatPackageName(String value) { if (value.isEmpty() || value.equals(Names.DOT.value)) { value = parent.packageID.toString(); } else if (!value.substring(0, 9).contains(Names.BALLERINA_ORG.value + Names.ORG_NAME_SEPARATOR.value)) { if (!value.contains(Names.ORG_NAME_SEPARATOR.value) && !value.contains(Names.VERSION_SEPARATOR.value)) { value = new PackageID(parent.packageID.orgName, new Name(value), parent.packageID.version).toString(); } } return value; } /** * Check whether there is a common element in two Lists. * * @param inputGroups String @{@link List} to match * @param functionGroups String @{@link List} to match agains * @return true if a match is found */ private boolean isGroupAvailable(List<String> inputGroups, List<String> functionGroups) { for (String group : inputGroups) { for (String funcGroup : functionGroups) { if (group.equals(funcGroup)) { return true; } } } return false; } private String getPackageName(PackageNode packageNode) { BLangPackage bLangPackage = ((BLangPackage) packageNode); return bLangPackage.packageID.toString(); } /*private static int getTestInstructionsPosition(PackageInfo packageInfo) { FunctionInfo testInitFunctionInfo = packageInfo.getTestInitFunctionInfo(); if (testInitFunctionInfo != null) { return testInitFunctionInfo.getDefaultWorkerInfo().getCodeAttributeInfo().getCodeAddrs(); } return packageInfo.getInstructions().length; }*/ }
Since this will be called more than once (in a general scenario when compiling a Ballerina program), I think it is better to compile this Regex once (declare as a top level variable) to reduce the overhead. WDYT?
public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (isInErrorState) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { String nodeValue = getNodeValue(ctx, node); int literalTypeTag = NumericLiteralSupport.isDecimalDiscriminated(nodeValue) ? TypeTags.DECIMAL : TypeTags.FLOAT; this.pkgBuilder.addLiteralValue(pos, ws, literalTypeTag, nodeValue, node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getHexNodeValue(ctx, node), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); String originalText = text; Pattern pattern = Pattern.compile(Constants.UNICODE_REGEX); Matcher matcher = pattern.matcher(text); int position = 0; while (matcher.find(position)) { String hexStringVal = matcher.group(1); int hexDecimalVal = Integer.parseInt(hexStringVal, 16); if ((hexDecimalVal >= 0xD800 && hexDecimalVal <= 0xDFFF) || hexDecimalVal > 0x10FFFF) { String hexStringWithBraces = matcher.group(0); int offset = originalText.indexOf(hexStringWithBraces) + 1; dlog.error(new DiagnosticPos(diagnosticSrc, pos.sLine, pos.eLine, pos.sCol + offset, pos.sCol + offset + hexStringWithBraces.length()), DiagnosticCode.INVALID_UNICODE, 
hexStringWithBraces); } text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal)); position = matcher.end() - 2; matcher = pattern.matcher(text); } text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.nilLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "()"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } }
Pattern pattern = Pattern.compile(Constants.UNICODE_REGEX);
public void exitSimpleLiteral(BallerinaParser.SimpleLiteralContext ctx) { if (isInErrorState) { return; } TerminalNode node; DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); Object value; BallerinaParser.IntegerLiteralContext integerLiteralContext = ctx.integerLiteral(); if (integerLiteralContext != null && (value = getIntegerLiteral(ctx, ctx.integerLiteral())) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.INT, value, ctx.getText()); } else if (ctx.floatingPointLiteral() != null) { if ((node = ctx.floatingPointLiteral().DecimalFloatingPointNumber()) != null) { String nodeValue = getNodeValue(ctx, node); int literalTypeTag = NumericLiteralSupport.isDecimalDiscriminated(nodeValue) ? TypeTags.DECIMAL : TypeTags.FLOAT; this.pkgBuilder.addLiteralValue(pos, ws, literalTypeTag, nodeValue, node.getText()); } else if ((node = ctx.floatingPointLiteral().HexadecimalFloatingPointLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.FLOAT, getHexNodeValue(ctx, node), node.getText()); } } else if ((node = ctx.BooleanLiteral()) != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BOOLEAN, Boolean.parseBoolean(node.getText()), node.getText()); } else if ((node = ctx.QuotedStringLiteral()) != null) { String text = node.getText(); text = text.substring(1, text.length() - 1); String originalText = text; Matcher matcher = pattern.matcher(text); int position = 0; while (matcher.find(position)) { String hexStringVal = matcher.group(1); int hexDecimalVal = Integer.parseInt(hexStringVal, 16); if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE) || hexDecimalVal > Constants.MAX_UNICODE) { String hexStringWithBraces = matcher.group(0); int offset = originalText.indexOf(hexStringWithBraces) + 1; dlog.error(new DiagnosticPos(diagnosticSrc, pos.sLine, pos.eLine, pos.sCol + offset, pos.sCol + offset + hexStringWithBraces.length()), DiagnosticCode.INVALID_UNICODE, hexStringWithBraces); } 
text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal)); position = matcher.end() - 2; matcher = pattern.matcher(text); } text = StringEscapeUtils.unescapeJava(text); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, text, node.getText()); } else if (ctx.NullLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "null"); } else if (ctx.nilLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.NIL, null, "()"); } else if (ctx.blobLiteral() != null) { this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.BYTE_ARRAY, ctx.blobLiteral().getText()); } }
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_KEY = "key"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLog dlog; private List<String> pkgNameComps; private String pkgVersion; private boolean isInErrorState = false; BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLog.getInstance(context); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitParameter(BallerinaParser.ParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, ctx.annotationAttachment().size(), ctx.PUBLIC() != null); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), 
ctx.annotationAttachment().size()); } @Override public void exitRestParameterTypeName(BallerinaParser.RestParameterTypeNameContext ctx) { if (isInErrorState) { return; } pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), null, null, 0); } /** * {@inheritDoc} */ @Override public void exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), null, null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (isInErrorState) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().versionPattern().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; final DiagnosticPos varPos = ctx.Identifier() != null ? getCurrentPos(ctx.Identifier()) : serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, false); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder .addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, false, false, false, true); } /** * {@inheritDoc} */ @Override public void enterCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitExternalFunctionBody(BallerinaParser.ExternalFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExternalFunctionBody(ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } int annotCount = ((BallerinaParser.CompilationUnitContext) ctx.parent.parent).annotationAttachment().size(); this.pkgBuilder.startFunctionDef(annotCount, false); } /** * {@inheritDoc} */ @Override public void 
exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean nativeFunc = ctx.externalFunctionBody() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean privateFunc = ctx.PRIVATE() != null; this.pkgBuilder.endFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, remoteFunc, nativeFunc, privateFunc, bodyExists, false); } @Override public void enterLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, ctx.lambdaReturnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } @Override public void enterArrowFunction(BallerinaParser.ArrowFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitArrowFunctionExpression(BallerinaParser.ArrowFunctionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } @Override public void exitArrowParam(BallerinaParser.ArrowParamContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, 0); } /** * {@inheritDoc} */ @Override public void exitCallableUnitSignature(BallerinaParser.CallableUnitSignatureContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitSignature(getCurrentPos(ctx), getWS(ctx), ctx.anyIdentifierName().getText(), getCurrentPos(ctx.anyIdentifierName()), ctx.formalParameterList() != null, ctx.returnParameter() 
!= null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext) || (ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext && ctx.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext && ctx.parent.parent.parent.getChildCount() > 1); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; boolean isClient = ((ObjectTypeNameLabelContext) ctx.parent).CLIENT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract, isClient, false); } @Override public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void 
exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier()); boolean exprAvailable = ctx.expression() != null; boolean isOptional = ctx.QUESTION_MARK() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, identifierPos, exprAvailable, ctx.annotationAttachment().size(), false, isOptional); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier()); boolean exprAvailable = ctx.expression() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; this.pkgBuilder.addObjectFieldVariable(currentPos, ws, name, identifierPos, exprAvailable, annotationCount, isPrivate, isPublic); } /** * {@inheritDoc} */ @Override public void enterObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean resourceFunc = ctx.RESOURCE() != null; boolean nativeFunc = ctx.externalFunctionBody() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, 
isPrivate, remoteFunc, resourceFunc, nativeFunc, bodyExists, markdownDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.typeName() != null; boolean isConst = ctx.CONST() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), publicAnnotation, isTypeAttached, isConst); } /** * {@inheritDoc} */ @Override public void exitConstantDefinition(BallerinaParser.ConstantDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isTypeAvailable = ctx.typeName() != null; this.pkgBuilder.addConstant(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isPublic, isTypeAvailable); } @Override public void exitConstDivMulModExpression(BallerinaParser.ConstDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitConstAddSubExpression(BallerinaParser.ConstAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitConstGroupExpression(BallerinaParser.ConstGroupExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if 
(isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; boolean isListenerVar = ctx.LISTENER() != null; this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isPublic, isFinal, isDeclaredWithVar, isExpressionAvailable, isListenerVar); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (isInErrorState) { return; } AttachPoint attachPoint; if (ctx.dualAttachPoint() != null) { if (ctx.dualAttachPoint().SOURCE() != null) { attachPoint = AttachPoint.getAttachmentPoint(ctx.dualAttachPoint().dualAttachPointIdent().getText(), true); } else { attachPoint = AttachPoint.getAttachmentPoint(ctx.getText(), false); } } else { attachPoint = AttachPoint.getAttachmentPoint( ctx.sourceOnlyAttachPoint().sourceOnlyAttachPointIdent().getText(), true); } this.pkgBuilder.addAttachPoint(attachPoint, getWS(ctx)); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWorker(diagnosticSrc.pkgID); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } String workerName = null; DiagnosticPos workerNamePos = null; if (ctx.workerDefinition() != null) { workerName = escapeQuotedIdentifier(ctx.workerDefinition().Identifier().getText()); workerNamePos = getCurrentPos(ctx.workerDefinition().Identifier()); } boolean retParamsAvail = ctx.workerDefinition().returnParameter() != null; int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.addWorker( getCurrentPos(ctx), getWS(ctx), workerName, workerNamePos, retParamsAvail, numAnnotations); } /** * {@inheritDoc} */ @Override public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if 
(isInErrorState) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (isInErrorState) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1).getText().equals(OPEN_SEALED_ARRAY)) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 1; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 1; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.tupleTypeDescriptor().typeName().size(), ctx.tupleTypeDescriptor().tupleRestDescriptor() != null); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } @Override public void enterInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void 
exitInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, false, false); } @Override public void enterExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void exitExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, hasRestField, true); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void 
exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.functionTypeName() != null) { return; } if (ctx.errorTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(pos, getWS(ctx), typeName); } } @Override public void exitErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) { if (isInErrorState) { return; } boolean reasonTypeExists = !ctx.typeName().isEmpty(); boolean detailsTypeExists = ctx.typeName().size() > 1; boolean isAnonymous = !(ctx.parent.parent.parent.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext) && reasonTypeExists; this.pkgBuilder.addErrorType(getCurrentPos(ctx), getWS(ctx), reasonTypeExists, detailsTypeExists, isAnonymous); } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (isInErrorState) { return; } boolean paramsAvail = false, retParamAvail = false, restParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; if (ctx.parameterList().restParameter() != null) { restParamAvail = true; } } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; if (ctx.parameterTypeNameList().restParameterTypeName() != null) { restParamAvail = true; } } if (ctx.returnParameter() != null) { retParamAvail = true; } this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, restParamAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void 
enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } @Override public void exitErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.typeName() != null) { if (ctx.errorFieldBindingPatterns().errorRestBindingPattern() != null) { String restIdName = ctx.errorFieldBindingPatterns().errorRestBindingPattern().Identifier().getText(); DiagnosticPos restPos = getCurrentPos(ctx.errorFieldBindingPatterns().errorRestBindingPattern()); this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), restIdName, restPos); } else { this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), null, null); } return; } String reasonIdentifier = ctx.Identifier().getText(); DiagnosticPos currentPos = getCurrentPos(ctx); String restIdentifier = null; DiagnosticPos restParamPos = null; if (ctx.errorRestBindingPattern() != null) { restIdentifier = ctx.errorRestBindingPattern().Identifier().getText(); restParamPos = getCurrentPos(ctx.errorRestBindingPattern()); } this.pkgBuilder.addErrorVariable(currentPos, getWS(ctx), reasonIdentifier, restIdentifier, false, false, restParamPos); } @Override public void enterErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorBindingNode(); } @Override public void enterErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorBindingNode(); } @Override public void exitSimpleMatchPattern(BallerinaParser.SimpleMatchPatternContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.endSimpleMatchPattern(getWS(ctx)); } @Override public void exitErrorArgListMatchPattern(BallerinaParser.ErrorArgListMatchPatternContext ctx) { if (isInErrorState) { return; } String restIdentifier = null; DiagnosticPos restParamPos = null; if (ctx.restMatchPattern() != null) { restIdentifier = ctx.restMatchPattern().Identifier().getText(); restParamPos = getCurrentPos(ctx.restMatchPattern()); } String reasonIdentifier = null; boolean reasonVar = false; boolean constReasonMatchPattern = false; if (ctx.simpleMatchPattern() != null) { reasonVar = ctx.simpleMatchPattern().VAR() != null; if (ctx.simpleMatchPattern().Identifier() != null) { reasonIdentifier = ctx.simpleMatchPattern().Identifier().getText(); } else { reasonIdentifier = ctx.simpleMatchPattern().QuotedStringLiteral().getText(); constReasonMatchPattern = true; } } this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), reasonIdentifier, restIdentifier, reasonVar, constReasonMatchPattern, restParamPos); } @Override public void exitErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) { if (isInErrorState) { return; } boolean isIndirectErrorMatchPatern = ctx.typeName() != null; this.pkgBuilder.endErrorMatchPattern(getWS(ctx), isIndirectErrorMatchPatern); } @Override public void exitErrorDetailBindingPattern(BallerinaParser.ErrorDetailBindingPatternContext ctx) { if (isInErrorState) { return; } String bindingVarName = null; if (ctx.bindingPattern() != null && ctx.bindingPattern().Identifier() != null) { bindingVarName = ctx.bindingPattern().Identifier().getText(); } this.pkgBuilder.addErrorDetailBinding(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), bindingVarName); } @Override public void exitErrorRefBindingPattern(BallerinaParser.ErrorRefBindingPatternContext ctx) { if (isInErrorState) { return; } int numNamedArgs = ctx.errorNamedArgRefPattern().size(); boolean reasonRefAvailable = ctx.variableReference() != null; boolean restPatternAvailable = 
ctx.errorRefRestPattern() != null; boolean indirectErrorRefPattern = ctx.typeName() != null; this.pkgBuilder.addErrorVariableReference(getCurrentPos(ctx), getWS(ctx), numNamedArgs, reasonRefAvailable, restPatternAvailable, indirectErrorRefPattern); } @Override public void exitErrorNamedArgRefPattern(BallerinaParser.ErrorNamedArgRefPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } @Override public void exitListBindingPattern(BallerinaParser.ListBindingPatternContext ctx) { if (isInErrorState) { return; } boolean restBindingAvailable = ctx.restBindingPattern() != null; this.pkgBuilder.addTupleVariable(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().size(), restBindingAvailable); } @Override public void exitListRefBindingPattern(BallerinaParser.ListRefBindingPatternContext ctx) { if (isInErrorState) { return; } boolean restPatternAvailable = ctx.listRefRestPattern() != null; this.pkgBuilder.addTupleVariableReference(getCurrentPos(ctx), getWS(ctx), ctx.bindingRefPattern().size(), restPatternAvailable); } @Override public void enterRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableList(); } @Override public void exitRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) { if (isInErrorState) { return; } boolean hasRestBindingPattern = ctx.entryBindingPattern().restBindingPattern() != null; this.pkgBuilder.addRecordVariable(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern); } @Override public void enterRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableReferenceList(); } @Override public void exitRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) { if (isInErrorState) { return; } boolean hasRestBindingPattern = 
ctx.entryRefBindingPattern().restRefBindingPattern() != null; this.pkgBuilder.addRecordVariableReference(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern); } @Override public void exitBindingPattern(BallerinaParser.BindingPatternContext ctx) { if (isInErrorState) { return; } if ((ctx.Identifier() != null) && ((ctx.parent instanceof BallerinaParser.ListBindingPatternContext) || (ctx.parent instanceof BallerinaParser.FieldBindingPatternContext) || (ctx.parent instanceof BallerinaParser.MatchPatternClauseContext))) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier())); } else if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternNameWhitespace(getWS(ctx)); } } @Override public void exitFieldBindingPattern(BallerinaParser.FieldBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), ctx.bindingPattern() != null); } @Override public void exitFieldRefBindingPattern(BallerinaParser.FieldRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldRefBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.bindingRefPattern() != null); } @Override public void exitRestBindingPattern(BallerinaParser.RestBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier())); } } @Override public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) { if (isInErrorState) { return; } boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; if (ctx.Identifier() != null) { 
this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().Identifier() != null) { this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().Identifier().getText(), getCurrentPos(ctx.bindingPattern().Identifier()), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().listBindingPattern() != null) { this.pkgBuilder.addTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } } @Override public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMapStructLiteral(); } @Override public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordKeyValue(BallerinaParser.RecordKeyValueContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addKeyValueRecord(getWS(ctx), ctx.recordKey().LEFT_BRACKET() != null); } @Override public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx)); } else if (ctx.LEFT_BRACKET() != null) { 
this.pkgBuilder.addRecordKeyWS(getWS(ctx)); } } @Override public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTableLiteral(); } @Override public void exitTableColumnDefinition(BallerinaParser.TableColumnDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableColumnDefinition(getWS(ctx)); } @Override public void exitTableColumn(BallerinaParser.TableColumnContext ctx) { if (isInErrorState) { return; } String columnName; int childCount = ctx.getChildCount(); if (childCount == 2) { boolean keyColumn = KEYWORD_KEY.equals(ctx.getChild(0).getText()); if (keyColumn) { columnName = escapeQuotedIdentifier(ctx.getChild(1).getText()); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); this.pkgBuilder.markPrimaryKeyColumn(columnName); } else { DiagnosticPos pos = getCurrentPos(ctx); dlog.error(pos, DiagnosticCode.TABLE_KEY_EXPECTED); } } else { columnName = escapeQuotedIdentifier(ctx.getChild(0).getText()); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); } } @Override public void exitTableDataArray(BallerinaParser.TableDataArrayContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataArray(getWS(ctx)); } @Override public void exitTableDataList(BallerinaParser.TableDataListContext ctx) { if (isInErrorState) { return; } if (ctx.expressionList() != null) { this.pkgBuilder.endTableDataRow(getWS(ctx)); } } @Override public void exitTableData(BallerinaParser.TableDataContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitListConstructorExpr(BallerinaParser.ListConstructorExprContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = 
ctx.expressionList() != null; this.pkgBuilder.addListConstructorExpression(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) { if (isInErrorState) { return; } String initName = ctx.NEW().getText(); boolean typeAvailable = ctx.userDefineTypeName() != null; boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable, argsAvailable); } @Override public void exitServiceConstructorExpression(BallerinaParser.ServiceConstructorExpressionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = null; final DiagnosticPos varPos = serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, true, ctx.serviceConstructorExpr().annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitListDestructuringStatement(BallerinaParser.ListDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordDestructuringStatement(BallerinaParser.RecordDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRecordDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitErrorDestructuringStatement(BallerinaParser.ErrorDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addErrorDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) { if (isInErrorState) { 
return; } String compoundOperatorText = ctx.compoundOperator().getText(); String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1); this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator); } /** * {@inheritDoc} */ @Override public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addCompoundOperator(getWS(ctx)); } @Override public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } @Override public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } /** * {@inheritDoc} */ @Override public void enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endIfElseNode(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitIfClause(BallerinaParser.IfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ 
@Override public void exitElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createMatchNode(getCurrentPos(ctx)); } @Override public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMatchStmtPattern(); } @Override public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } if (ctx.bindingPattern() != null || ctx.errorMatchPattern() != null) { boolean isTypeGuardPresent = ctx.IF() != null; this.pkgBuilder.addMatchStmtStructuredBindingPattern(getCurrentPos(ctx), getWS(ctx), isTypeGuardPresent); return; } this.pkgBuilder.addMatchStmtStaticBindingPattern(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForeachStatement(); } @Override public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { String identifier = ctx.bindingPattern().Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier()); this.pkgBuilder.addForeachStatementWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), identifier, identifierPos, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { 
this.pkgBuilder.addForeachStatementWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.addForeachStatementWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } } @Override public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx), ctx.LEFT_PARENTHESIS() == null, ctx.RIGHT_PARENTHESIS() == null, ctx.expression(1) == null); } /** * {@inheritDoc} */ @Override public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWhileStmt(); } /** * {@inheritDoc} */ @Override public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForkJoinStmt(); } @Override public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void 
enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTryCatchFinallyStmt(); } @Override public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryClause(getCurrentPos(ctx)); } @Override public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCatchClause(); } @Override public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } String paramName = ctx.Identifier().getText(); this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName); } @Override public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startFinallyBlock(); } @Override public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitPanicStatement(BallerinaParser.PanicStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addPanicStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null); } @Override public void exitWorkerReceiveExpression(BallerinaParser.WorkerReceiveExpressionContext ctx) { if (isInErrorState) { return; } String 
workerName = ctx.peerWorker().DEFAULT() != null ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText(); this.pkgBuilder.addWorkerReceiveExpr(getCurrentPos(ctx), getWS(ctx), workerName, ctx.expression() != null); } @Override public void exitFlushWorker(BallerinaParser.FlushWorkerContext ctx) { if (isInErrorState) { return; } String workerName = ctx.Identifier() != null ? ctx.Identifier().getText() : null; this.pkgBuilder.addWorkerFlushExpr(getCurrentPos(ctx), getWS(ctx), workerName); } @Override public void exitWorkerSendAsyncStatement(BallerinaParser.WorkerSendAsyncStatementContext ctx) { if (isInErrorState) { return; } String workerName = ctx.peerWorker().DEFAULT() != null ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText(); this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), workerName, ctx.expression().size() > 1); } @Override public void exitWorkerSendSyncExpression(BallerinaParser.WorkerSendSyncExpressionContext ctx) { if (isInErrorState) { return; } String workerName = ctx.peerWorker().DEFAULT() != null ? 
ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText(); this.pkgBuilder.addWorkerSendSyncExpr(getCurrentPos(ctx), getWS(ctx), workerName); } @Override public void exitWaitExpression(BallerinaParser.WaitExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.waitForCollection() != null) { this.pkgBuilder.handleWaitForAll(getCurrentPos(ctx), getWS(ctx)); } else { this.pkgBuilder.handleWait(getCurrentPos(ctx), getWS(ctx)); } } @Override public void enterWaitForCollection(BallerinaParser.WaitForCollectionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWaitForAll(); } @Override public void exitWaitKeyValue(BallerinaParser.WaitKeyValueContext ctx) { if (isInErrorState) { return; } boolean containsExpr = ctx.expression() != null; this.pkgBuilder.addKeyValueToWaitForAll(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), containsExpr); } /** * {@inheritDoc} */ @Override public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) { if (isInErrorState) { return; } boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null; this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef); } @Override public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitStringFunctionInvocationReference(BallerinaParser.StringFunctionInvocationReferenceContext ctx) { if (isInErrorState) { return; } TerminalNode node = ctx.QuotedStringLiteral(); DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String actualText = node.getText(); actualText = actualText.substring(1, actualText.length() - 1); actualText = StringEscapeUtils.unescapeJava(actualText); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText()); boolean argsAvailable = 
ctx.invocation().invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName(); String invocation = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, getCurrentPos(identifierContext)); } @Override public void exitGroupStringFunctionInvocationReference(GroupStringFunctionInvocationReferenceContext ctx) { if (isInErrorState) { return; } TerminalNode node = ctx.QuotedStringLiteral(); DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String actualText = node.getText(); actualText = actualText.substring(1, actualText.length() - 1); actualText = StringEscapeUtils.unescapeJava(actualText); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText()); InvocationContext invocation = ctx.invocation(); boolean argsAvailable = invocation.invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName(); String invocationText = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText, argsAvailable, getCurrentPos(identifierContext)); this.pkgBuilder.createGroupExpression(getCurrentPos(node), getWS(ctx)); } @Override public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable); } @Override public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) { if (isInErrorState) { return; } createFieldBasedAccessNode(ctx, ctx.field()); } @Override public void exitGroupFieldVariableReference(BallerinaParser.GroupFieldVariableReferenceContext ctx) { if (isInErrorState) { return; } FieldContext field = ctx.field(); VariableReferenceContext groupExpression = 
ctx.variableReference(); createFieldBasedAccessNode(field, field); this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression)); } private void createFieldBasedAccessNode(ParserRuleContext ctx, FieldContext field) { String fieldName; DiagnosticPos fieldNamePos; FieldKind fieldType; if (field.Identifier() != null) { fieldName = field.Identifier().getText(); fieldNamePos = getCurrentPos(field); fieldType = FieldKind.SINGLE; } else { fieldName = field.MUL().getText(); fieldNamePos = getCurrentPos(field); fieldType = FieldKind.ALL; } this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName, fieldNamePos, fieldType, field.OPTIONAL_FIELD_ACCESS() != null); } @Override public void exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitGroupMapArrayVariableReference(BallerinaParser.GroupMapArrayVariableReferenceContext ctx) { if (isInErrorState) { return; } IndexContext index = ctx.index(); VariableReferenceContext groupExpression = ctx.variableReference(); this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(index), getWS(index)); this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression)); } @Override public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startInvocationNode(getWS(ctx)); } @Override public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) { if (isInErrorState) { return; } if (ctx.reservedWord() == null) { this.pkgBuilder.startInvocationNode(getWS(ctx)); } } @Override public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext 
identifierContext = ctx.invocation().anyIdentifierName(); String invocation = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, getCurrentPos(identifierContext)); } @Override public void exitGroupInvocationReference(BallerinaParser.GroupInvocationReferenceContext ctx) { if (isInErrorState) { return; } InvocationContext invocation = ctx.invocation(); VariableReferenceContext groupExpression = ctx.variableReference(); boolean argsAvailable = invocation.invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName(); String invocationText = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText, argsAvailable, getCurrentPos(identifierContext)); this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression)); } @Override public void exitTypeDescExprInvocationReference(BallerinaParser.TypeDescExprInvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName(); String invocation = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, getCurrentPos(identifierContext)); } /** * {@inheritDoc} */ @Override public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } /** * {@inheritDoc} */ @Override public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.startExprNodeList(); } @Override public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } @Override public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTransactionStmt(); } /** * {@inheritDoc} */ @Override public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.endTransactionStmt(pos, getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTransactionPropertyInitStatementList( BallerinaParser.TransactionPropertyInitStatementListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLockStmt(); } /** * {@inheritDoc} */ @Override public void exitLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOnretryBlock(); } /** * {@inheritDoc} */ @Override public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) 
{ if (isInErrorState) { return; } this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCommittedBlock(); } /** * {@inheritDoc} */ @Override public void exitCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCommittedBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAbortedBlock(); } /** * {@inheritDoc} */ @Override public void exitAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endAbortedBlock(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryCountExpression(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { } @Override public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (isInErrorState) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); DiagnosticPos pos = getCurrentPos(ctx); namespaceUri = namespaceUri.substring(1, 
namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ? ctx.Identifier().getText() : null; DiagnosticPos prefixPos = (ctx.Identifier() != null) ? getCurrentPos(ctx.Identifier()) : null; this.pkgBuilder.addXMLNSDeclaration(pos, getWS(ctx), namespaceUri, prefix, prefixPos, isTopLevel); } @Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryRefEqualExpression(BallerinaParser.BinaryRefEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchOrExpression(BallerinaParser.StaticMatchOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitStaticMatchIdentifierLiteral(BallerinaParser.StaticMatchIdentifierLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTypeDescExpr(BallerinaParser.TypeDescExprContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (isInErrorState) { return; } int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null, numAnnotations); } @Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (isInErrorState) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); } /** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size(), ctx.typeName() != null); } @Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void 
exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitTypeTestExpression(BallerinaParser.TypeTestExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeTestExpression(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitAnnotAccessExpression(BallerinaParser.AnnotAccessExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createAnnotAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitGroupExpression(BallerinaParser.GroupExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitCheckPanickedExpression(BallerinaParser.CheckPanickedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckPanickedExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, 
DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } @Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } } /** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void exitLambdaReturnParameter(BallerinaParser.LambdaReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); } @Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { 
this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } /** * {@inheritDoc} */ @Override private String fillWithZeros(String str) { while (str.length() < 4) { str = "0".concat(str); } return str; } /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitElement(BallerinaParser.ElementContext ctx) { if 
(isInErrorState) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (isInErrorState) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (isInErrorState) { return; } Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = 
getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (isInErrorState) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { 
stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (isInErrorState) { return; } String text = ctx.getText() != null ? ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (isInErrorState) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } @Override public void exitTrapExpression(BallerinaParser.TrapExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTrapExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.START() != null) { int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx), numAnnotations); } } @Override public void exitDocumentationReference(BallerinaParser.DocumentationReferenceContext ctx) { if (isInErrorState) { return; } BallerinaParser.ReferenceTypeContext referenceType = ctx.referenceType(); BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); this.pkgBuilder.endDocumentationReference(getCurrentPos(ctx), referenceType.getText(), backtickedContent.getText()); } @Override public void exitSingleBacktickedBlock(BallerinaParser.SingleBacktickedBlockContext ctx) { if (isInErrorState) { return; } BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); this.pkgBuilder.endSingleBacktickedBlock(getCurrentPos(ctx), backtickedContent.getText()); } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + (stop.getStopIndex() - stop.getStartIndex() + 1) + 1; } return new DiagnosticPos(diagnosticSrc, startLine, 
endLine, startCol, endCol);
    }

    /**
     * Builds the diagnostic position of a single terminal token. The end column is
     * derived from the token text length, so the span never crosses a line.
     */
    private DiagnosticPos getCurrentPos(TerminalNode node) {
        Token symbol = node.getSymbol();
        int startLine = symbol.getLine();
        int startCol = symbol.getCharPositionInLine() + 1;
        int endLine = startLine;
        int endCol = startCol + symbol.getText().length();
        return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol);
    }

    /**
     * Returns the whitespace captured for the given context. This base implementation
     * discards whitespace; whitespace-preserving subclasses override it.
     */
    protected Set<Whitespace> getWS(ParserRuleContext ctx) {
        return null;
    }

    /**
     * Collects the literal text pieces of a template expression, stripping the last
     * two characters (the expression-start marker) from each piece. Null nodes are
     * pushed as-is so fragment positions stay aligned.
     */
    private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) {
        Stack<String> templateStrFragments = new Stack<>();
        nodes.forEach(node -> {
            if (node == null) {
                templateStrFragments.push(null);
            } else {
                String str = node.getText();
                templateStrFragments.push(str.substring(0, str.length() - 2));
            }
        });
        return templateStrFragments;
    }

    /** Returns the text of the closing template fragment, or null when absent. */
    private String getTemplateEndingStr(TerminalNode node) {
        return node == null ? null : node.getText();
    }

    /** Concatenates the text of all closing template fragments. */
    private String getTemplateEndingStr(List<TerminalNode> nodes) {
        StringJoiner joiner = new StringJoiner("");
        nodes.forEach(node -> joiner.add(node.getText()));
        return joiner.toString();
    }

    /**
     * Returns the literal token's text, prefixed with "-" when the first child of
     * the surrounding context is a unary minus.
     */
    private String getNodeValue(ParserRuleContext ctx, TerminalNode node) {
        String op = ctx.getChild(0).getText();
        String value = node.getText();
        if (op != null && "-".equals(op)) {
            value = "-" + value;
        }
        return value;
    }

    /**
     * Returns the hexadecimal floating point literal text, appending a zero binary
     * exponent ("p0") when the literal does not specify one.
     */
    private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) {
        String value = getNodeValue(ctx, node);
        if (!(value.contains("p") || value.contains("P"))) {
            value = value + "p0";
        }
        return value;
    }

    /**
     * Parses a decimal or hexadecimal integer literal.
     *
     * @return the parsed {@code Long}; the original literal text when it is out of
     *         range (after logging a diagnostic); or {@code null} for literal kinds
     *         not handled here
     */
    private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext,
                                     BallerinaParser.IntegerLiteralContext integerLiteralContext) {
        if (integerLiteralContext.DecimalIntegerLiteral() != null) {
            String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral());
            return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10,
                    DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE);
        } else if (integerLiteralContext.HexIntegerLiteral() != null) {
            String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral());
            String processedNodeValue = nodeValue.toLowerCase().replace("0x", "");
            return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16,
                    DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE);
        }
        return null;
    }

    /**
     * Parses {@code processedNodeValue} as a long in the given radix. On overflow or
     * any other parse failure, a too-small/too-large diagnostic (chosen by the
     * literal's sign) is logged and the original literal text is returned so that
     * compilation can continue past the error.
     */
    private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue,
                             int radix, DiagnosticCode code1, DiagnosticCode code2) {
        try {
            return Long.parseLong(processedNodeValue, radix);
        } catch (Exception e) {
            DiagnosticPos pos = getCurrentPos(context);
            // The result of getWS is unused here, but the call is kept because
            // whitespace-preserving subclasses override it and may rely on its
            // side effects -- TODO confirm whether the call can be dropped.
            getWS(context);
            if (originalNodeValue.startsWith("-")) {
                dlog.error(pos, code1, originalNodeValue);
            } else {
                dlog.error(pos, code2, originalNodeValue);
            }
        }
        return originalNodeValue;
    }

    /**
     * Mark that this listener is in error state.
     */
    public void setErrorState() {
        this.isInErrorState = true;
    }

    /**
     * Mark that this listener is not in an error state.
     */
    public void unsetErrorState() {
        this.isInErrorState = false;
    }

    /** Returns whether this listener is currently in an error state. */
    boolean isInErrorState() {
        return this.isInErrorState;
    }
}
class BLangParserListener extends BallerinaParserBaseListener { private static final String KEYWORD_PUBLIC = "public"; private static final String KEYWORD_KEY = "key"; private BLangPackageBuilder pkgBuilder; private BDiagnosticSource diagnosticSrc; private BLangDiagnosticLog dlog; private List<String> pkgNameComps; private String pkgVersion; private boolean isInErrorState = false; private Pattern pattern = Pattern.compile(Constants.UNICODE_REGEX); BLangParserListener(CompilerContext context, CompilationUnitNode compUnit, BDiagnosticSource diagnosticSource) { this.pkgBuilder = new BLangPackageBuilder(context, compUnit); this.diagnosticSrc = diagnosticSource; this.dlog = BLangDiagnosticLog.getInstance(context); } @Override public void enterParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitParameter(BallerinaParser.ParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, ctx.annotationAttachment().size(), ctx.PUBLIC() != null); } /** * {@inheritDoc} */ @Override public void enterFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } /** * {@inheritDoc} */ @Override public void exitFormalParameterList(BallerinaParser.FormalParameterListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFormalParameterList(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDefaultableParameter(BallerinaParser.DefaultableParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addDefaultableParam(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitRestParameter(BallerinaParser.RestParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), 
ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), ctx.annotationAttachment().size()); } @Override public void exitRestParameterTypeName(BallerinaParser.RestParameterTypeNameContext ctx) { if (isInErrorState) { return; } pkgBuilder.addRestParam(getCurrentPos(ctx), getWS(ctx), null, null, 0); } /** * {@inheritDoc} */ @Override public void exitParameterTypeName(BallerinaParser.ParameterTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addSimpleVar(getCurrentPos(ctx), getWS(ctx), null, null, false, 0); } @Override public void enterCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} */ @Override public void exitCompilationUnit(BallerinaParser.CompilationUnitContext ctx) { this.pkgBuilder.endCompilationUnit(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitPackageName(BallerinaParser.PackageNameContext ctx) { if (isInErrorState) { return; } this.pkgNameComps = new ArrayList<>(); ctx.Identifier().forEach(e -> pkgNameComps.add(e.getText())); this.pkgVersion = ctx.version() != null ? ctx.version().versionPattern().getText() : null; } /** * {@inheritDoc} */ @Override public void exitImportDeclaration(BallerinaParser.ImportDeclarationContext ctx) { if (isInErrorState) { return; } String alias = ctx.Identifier() != null ? ctx.Identifier().getText() : null; BallerinaParser.OrgNameContext orgNameContext = ctx.orgName(); if (orgNameContext == null) { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), null, this.pkgNameComps, this.pkgVersion, alias); } else { this.pkgBuilder.addImportPackageDeclaration(getCurrentPos(ctx), getWS(ctx), orgNameContext.getText(), this.pkgNameComps, this.pkgVersion, alias); } } /** * {@inheritDoc} */ @Override public void exitServiceDefinition(BallerinaParser.ServiceDefinitionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = ctx.Identifier() != null ? 
ctx.Identifier().getText() : null; final DiagnosticPos varPos = ctx.Identifier() != null ? getCurrentPos(ctx.Identifier()) : serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, false); } /** * {@inheritDoc} */ @Override public void enterServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startServiceDef(getCurrentPos(ctx)); this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitServiceBody(BallerinaParser.ServiceBodyContext ctx) { if (isInErrorState) { return; } boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder .addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, false, false, false, true); } /** * {@inheritDoc} */ @Override public void enterCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); } /** * {@inheritDoc} */ @Override public void exitCallableUnitBody(BallerinaParser.CallableUnitBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitBody(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitExternalFunctionBody(BallerinaParser.ExternalFunctionBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExternalFunctionBody(ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } int annotCount = ((BallerinaParser.CompilationUnitContext) ctx.parent.parent).annotationAttachment().size(); this.pkgBuilder.startFunctionDef(annotCount, false); } /** * {@inheritDoc} */ @Override public void 
exitFunctionDefinition(BallerinaParser.FunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean nativeFunc = ctx.externalFunctionBody() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean privateFunc = ctx.PRIVATE() != null; this.pkgBuilder.endFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, remoteFunc, nativeFunc, privateFunc, bodyExists, false); } @Override public void enterLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLambdaFunctionDef(diagnosticSrc.pkgID); } @Override public void exitLambdaFunction(BallerinaParser.LambdaFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLambdaFunctionDef(getCurrentPos(ctx), getWS(ctx), ctx.formalParameterList() != null, ctx.lambdaReturnParameter() != null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } @Override public void enterArrowFunction(BallerinaParser.ArrowFunctionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); } @Override public void exitArrowFunctionExpression(BallerinaParser.ArrowFunctionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addArrowFunctionDef(getCurrentPos(ctx), getWS(ctx), diagnosticSrc.pkgID); } @Override public void exitArrowParam(BallerinaParser.ArrowParamContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addVarWithoutType(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), false, 0); } /** * {@inheritDoc} */ @Override public void exitCallableUnitSignature(BallerinaParser.CallableUnitSignatureContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCallableUnitSignature(getCurrentPos(ctx), getWS(ctx), ctx.anyIdentifierName().getText(), getCurrentPos(ctx.anyIdentifierName()), ctx.formalParameterList() != null, ctx.returnParameter() 
!= null, ctx.formalParameterList() != null && ctx.formalParameterList().restParameter() != null); } /** * {@inheritDoc} */ @Override public void exitFiniteType(BallerinaParser.FiniteTypeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endFiniteType(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitTypeDefinition(BallerinaParser.TypeDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicObject = ctx.PUBLIC() != null; this.pkgBuilder.endTypeDefinition(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), publicObject); } /** * {@inheritDoc} */ @Override public void enterObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectType(); } /** * {@inheritDoc} */ @Override public void exitObjectBody(BallerinaParser.ObjectBodyContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext) || (ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext && ctx.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext && ctx.parent.parent.parent.getChildCount() > 1); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean isAbstract = ((ObjectTypeNameLabelContext) ctx.parent).ABSTRACT() != null; boolean isClient = ((ObjectTypeNameLabelContext) ctx.parent).CLIENT() != null; this.pkgBuilder.addObjectType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, isAbstract, isClient, false); } @Override public void exitTypeReference(BallerinaParser.TypeReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTypeReference(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void 
exitFieldDefinition(BallerinaParser.FieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier()); boolean exprAvailable = ctx.expression() != null; boolean isOptional = ctx.QUESTION_MARK() != null; this.pkgBuilder.addFieldVariable(currentPos, ws, name, identifierPos, exprAvailable, ctx.annotationAttachment().size(), false, isOptional); } /** * {@inheritDoc} */ @Override public void exitObjectFieldDefinition(BallerinaParser.ObjectFieldDefinitionContext ctx) { if (isInErrorState) { return; } DiagnosticPos currentPos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String name = ctx.Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.Identifier()); boolean exprAvailable = ctx.expression() != null; int annotationCount = ctx.annotationAttachment().size(); boolean isPrivate = ctx.PRIVATE() != null; boolean isPublic = ctx.PUBLIC() != null; this.pkgBuilder.addObjectFieldVariable(currentPos, ws, name, identifierPos, exprAvailable, annotationCount, isPrivate, isPublic); } /** * {@inheritDoc} */ @Override public void enterObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startObjectFunctionDef(); } /** * {@inheritDoc} */ @Override public void exitObjectFunctionDefinition(BallerinaParser.ObjectFunctionDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicFunc = ctx.PUBLIC() != null; boolean isPrivate = ctx.PRIVATE() != null; boolean remoteFunc = ctx.REMOTE() != null; boolean resourceFunc = ctx.RESOURCE() != null; boolean nativeFunc = ctx.externalFunctionBody() != null; boolean bodyExists = ctx.callableUnitBody() != null; boolean markdownDocExists = ctx.documentationString() != null; this.pkgBuilder.endObjectAttachedFunctionDef(getCurrentPos(ctx), getWS(ctx), publicFunc, 
isPrivate, remoteFunc, resourceFunc, nativeFunc, bodyExists, markdownDocExists, ctx.annotationAttachment().size()); } /** * {@inheritDoc} */ @Override public void enterAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationDef(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationDefinition(BallerinaParser.AnnotationDefinitionContext ctx) { if (isInErrorState) { return; } boolean publicAnnotation = KEYWORD_PUBLIC.equals(ctx.getChild(0).getText()); boolean isTypeAttached = ctx.typeName() != null; boolean isConst = ctx.CONST() != null; this.pkgBuilder.endAnnotationDef(getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), publicAnnotation, isTypeAttached, isConst); } /** * {@inheritDoc} */ @Override public void exitConstantDefinition(BallerinaParser.ConstantDefinitionContext ctx) { if (isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isTypeAvailable = ctx.typeName() != null; this.pkgBuilder.addConstant(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isPublic, isTypeAvailable); } @Override public void exitConstDivMulModExpression(BallerinaParser.ConstDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitConstAddSubExpression(BallerinaParser.ConstAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); } @Override public void exitConstGroupExpression(BallerinaParser.ConstGroupExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitGlobalVariableDefinition(BallerinaParser.GlobalVariableDefinitionContext ctx) { if 
(isInErrorState) { return; } boolean isPublic = ctx.PUBLIC() != null; boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; boolean isListenerVar = ctx.LISTENER() != null; this.pkgBuilder.addGlobalVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isPublic, isFinal, isDeclaredWithVar, isExpressionAvailable, isListenerVar); } @Override public void exitAttachmentPoint(BallerinaParser.AttachmentPointContext ctx) { if (isInErrorState) { return; } AttachPoint attachPoint; if (ctx.dualAttachPoint() != null) { if (ctx.dualAttachPoint().SOURCE() != null) { attachPoint = AttachPoint.getAttachmentPoint(ctx.dualAttachPoint().dualAttachPointIdent().getText(), true); } else { attachPoint = AttachPoint.getAttachmentPoint(ctx.getText(), false); } } else { attachPoint = AttachPoint.getAttachmentPoint( ctx.sourceOnlyAttachPoint().sourceOnlyAttachPointIdent().getText(), true); } this.pkgBuilder.addAttachPoint(attachPoint, getWS(ctx)); } @Override public void enterWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWorker(diagnosticSrc.pkgID); } @Override public void exitWorkerDeclaration(BallerinaParser.WorkerDeclarationContext ctx) { if (isInErrorState) { return; } String workerName = null; DiagnosticPos workerNamePos = null; if (ctx.workerDefinition() != null) { workerName = escapeQuotedIdentifier(ctx.workerDefinition().Identifier().getText()); workerNamePos = getCurrentPos(ctx.workerDefinition().Identifier()); } boolean retParamsAvail = ctx.workerDefinition().returnParameter() != null; int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.addWorker( getCurrentPos(ctx), getWS(ctx), workerName, workerNamePos, retParamsAvail, numAnnotations); } /** * {@inheritDoc} */ @Override public void exitWorkerDefinition(BallerinaParser.WorkerDefinitionContext ctx) { if 
(isInErrorState) { return; } this.pkgBuilder.attachWorkerWS(getWS(ctx)); } @Override public void exitArrayTypeNameLabel(BallerinaParser.ArrayTypeNameLabelContext ctx) { if (isInErrorState) { return; } int index = 1; int dimensions = 0; List<Integer> sizes = new ArrayList<>(); List<ParseTree> children = ctx.children; while (index < children.size()) { if (children.get(index).getText().equals("[")) { if (children.get(index + 1).getText().equals("]")) { sizes.add(UNSEALED_ARRAY_INDICATOR); index += 2; } else if (children.get(index + 1).getText().equals(OPEN_SEALED_ARRAY)) { sizes.add(OPEN_SEALED_ARRAY_INDICATOR); index += 1; } else { sizes.add(Integer.parseInt(children.get(index + 1).getText())); index += 1; } dimensions++; } else { index++; } } Collections.reverse(sizes); this.pkgBuilder.addArrayType( getCurrentPos(ctx), getWS(ctx), dimensions, sizes.stream().mapToInt(val -> val).toArray()); } @Override public void exitUnionTypeNameLabel(BallerinaParser.UnionTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUnionType(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTupleTypeNameLabel(BallerinaParser.TupleTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleType(getCurrentPos(ctx), getWS(ctx), ctx.tupleTypeDescriptor().typeName().size(), ctx.tupleTypeDescriptor().tupleRestDescriptor() != null); } @Override public void exitNullableTypeNameLabel(BallerinaParser.NullableTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsNullable(getWS(ctx)); } @Override public void exitGroupTypeNameLabel(BallerinaParser.GroupTypeNameLabelContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.markTypeNodeAsGrouped(getWS(ctx)); } @Override public void enterInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void 
exitInclusiveRecordTypeDescriptor(BallerinaParser.InclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, false, false); } @Override public void enterExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordType(); } @Override public void exitExclusiveRecordTypeDescriptor(BallerinaParser.ExclusiveRecordTypeDescriptorContext ctx) { if (isInErrorState) { return; } boolean isAnonymous = !(ctx.parent.parent instanceof BallerinaParser.FiniteTypeUnitContext); boolean isFieldAnalyseRequired = (ctx.parent.parent instanceof BallerinaParser.GlobalVariableDefinitionContext || ctx.parent.parent instanceof BallerinaParser.ReturnParameterContext) || ctx.parent.parent.parent.parent instanceof BallerinaParser.TypeDefinitionContext; boolean hasRestField = ctx.recordRestFieldDefinition() != null; this.pkgBuilder.addRecordType(getCurrentPos(ctx), getWS(ctx), isFieldAnalyseRequired, isAnonymous, hasRestField, true); } @Override public void exitSimpleTypeName(BallerinaParser.SimpleTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.referenceTypeName() != null || ctx.valueTypeName() != null) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); } @Override public void exitUserDefineTypeName(BallerinaParser.UserDefineTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addUserDefineType(getWS(ctx)); } @Override public void 
exitValueTypeName(BallerinaParser.ValueTypeNameContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addValueType(getCurrentPos(ctx), getWS(ctx), ctx.getText()); } @Override public void exitBuiltInReferenceTypeName(BallerinaParser.BuiltInReferenceTypeNameContext ctx) { if (isInErrorState) { return; } if (ctx.functionTypeName() != null) { return; } if (ctx.errorTypeName() != null) { return; } String typeName = ctx.getChild(0).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (ctx.typeName() != null) { this.pkgBuilder.addConstraintTypeWithTypeName(pos, getWS(ctx), typeName); } else { this.pkgBuilder.addBuiltInReferenceType(pos, getWS(ctx), typeName); } } @Override public void exitErrorTypeName(BallerinaParser.ErrorTypeNameContext ctx) { if (isInErrorState) { return; } boolean reasonTypeExists = !ctx.typeName().isEmpty(); boolean detailsTypeExists = ctx.typeName().size() > 1; boolean isAnonymous = !(ctx.parent.parent.parent.parent.parent.parent instanceof BallerinaParser.FiniteTypeContext) && reasonTypeExists; this.pkgBuilder.addErrorType(getCurrentPos(ctx), getWS(ctx), reasonTypeExists, detailsTypeExists, isAnonymous); } @Override public void exitFunctionTypeName(BallerinaParser.FunctionTypeNameContext ctx) { if (isInErrorState) { return; } boolean paramsAvail = false, retParamAvail = false, restParamAvail = false; if (ctx.parameterList() != null) { paramsAvail = ctx.parameterList().parameter().size() > 0; if (ctx.parameterList().restParameter() != null) { restParamAvail = true; } } else if (ctx.parameterTypeNameList() != null) { paramsAvail = ctx.parameterTypeNameList().parameterTypeName().size() > 0; if (ctx.parameterTypeNameList().restParameterTypeName() != null) { restParamAvail = true; } } if (ctx.returnParameter() != null) { retParamAvail = true; } this.pkgBuilder.addFunctionType(getCurrentPos(ctx), getWS(ctx), paramsAvail, restParamAvail, retParamAvail); } /** * {@inheritDoc} */ @Override public void 
enterAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAnnotationAttachment(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitAnnotationAttachment(BallerinaParser.AnnotationAttachmentContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.setAnnotationAttachmentName(getWS(ctx), ctx.recordLiteral() != null, getCurrentPos(ctx), false); } @Override public void exitErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.typeName() != null) { if (ctx.errorFieldBindingPatterns().errorRestBindingPattern() != null) { String restIdName = ctx.errorFieldBindingPatterns().errorRestBindingPattern().Identifier().getText(); DiagnosticPos restPos = getCurrentPos(ctx.errorFieldBindingPatterns().errorRestBindingPattern()); this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), restIdName, restPos); } else { this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), null, null); } return; } String reasonIdentifier = ctx.Identifier().getText(); DiagnosticPos currentPos = getCurrentPos(ctx); String restIdentifier = null; DiagnosticPos restParamPos = null; if (ctx.errorRestBindingPattern() != null) { restIdentifier = ctx.errorRestBindingPattern().Identifier().getText(); restParamPos = getCurrentPos(ctx.errorRestBindingPattern()); } this.pkgBuilder.addErrorVariable(currentPos, getWS(ctx), reasonIdentifier, restIdentifier, false, false, restParamPos); } @Override public void enterErrorBindingPattern(BallerinaParser.ErrorBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorBindingNode(); } @Override public void enterErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startErrorBindingNode(); } @Override public void exitSimpleMatchPattern(BallerinaParser.SimpleMatchPatternContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.endSimpleMatchPattern(getWS(ctx)); } @Override public void exitErrorArgListMatchPattern(BallerinaParser.ErrorArgListMatchPatternContext ctx) { if (isInErrorState) { return; } String restIdentifier = null; DiagnosticPos restParamPos = null; if (ctx.restMatchPattern() != null) { restIdentifier = ctx.restMatchPattern().Identifier().getText(); restParamPos = getCurrentPos(ctx.restMatchPattern()); } String reasonIdentifier = null; boolean reasonVar = false; boolean constReasonMatchPattern = false; if (ctx.simpleMatchPattern() != null) { reasonVar = ctx.simpleMatchPattern().VAR() != null; if (ctx.simpleMatchPattern().Identifier() != null) { reasonIdentifier = ctx.simpleMatchPattern().Identifier().getText(); } else { reasonIdentifier = ctx.simpleMatchPattern().QuotedStringLiteral().getText(); constReasonMatchPattern = true; } } this.pkgBuilder.addErrorVariable(getCurrentPos(ctx), getWS(ctx), reasonIdentifier, restIdentifier, reasonVar, constReasonMatchPattern, restParamPos); } @Override public void exitErrorMatchPattern(BallerinaParser.ErrorMatchPatternContext ctx) { if (isInErrorState) { return; } boolean isIndirectErrorMatchPatern = ctx.typeName() != null; this.pkgBuilder.endErrorMatchPattern(getWS(ctx), isIndirectErrorMatchPatern); } @Override public void exitErrorDetailBindingPattern(BallerinaParser.ErrorDetailBindingPatternContext ctx) { if (isInErrorState) { return; } String bindingVarName = null; if (ctx.bindingPattern() != null && ctx.bindingPattern().Identifier() != null) { bindingVarName = ctx.bindingPattern().Identifier().getText(); } this.pkgBuilder.addErrorDetailBinding(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), bindingVarName); } @Override public void exitErrorRefBindingPattern(BallerinaParser.ErrorRefBindingPatternContext ctx) { if (isInErrorState) { return; } int numNamedArgs = ctx.errorNamedArgRefPattern().size(); boolean reasonRefAvailable = ctx.variableReference() != null; boolean restPatternAvailable = 
ctx.errorRefRestPattern() != null; boolean indirectErrorRefPattern = ctx.typeName() != null; this.pkgBuilder.addErrorVariableReference(getCurrentPos(ctx), getWS(ctx), numNamedArgs, reasonRefAvailable, restPatternAvailable, indirectErrorRefPattern); } @Override public void exitErrorNamedArgRefPattern(BallerinaParser.ErrorNamedArgRefPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } @Override public void exitListBindingPattern(BallerinaParser.ListBindingPatternContext ctx) { if (isInErrorState) { return; } boolean restBindingAvailable = ctx.restBindingPattern() != null; this.pkgBuilder.addTupleVariable(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().size(), restBindingAvailable); } @Override public void exitListRefBindingPattern(BallerinaParser.ListRefBindingPatternContext ctx) { if (isInErrorState) { return; } boolean restPatternAvailable = ctx.listRefRestPattern() != null; this.pkgBuilder.addTupleVariableReference(getCurrentPos(ctx), getWS(ctx), ctx.bindingRefPattern().size(), restPatternAvailable); } @Override public void enterRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableList(); } @Override public void exitRecordBindingPattern(BallerinaParser.RecordBindingPatternContext ctx) { if (isInErrorState) { return; } boolean hasRestBindingPattern = ctx.entryBindingPattern().restBindingPattern() != null; this.pkgBuilder.addRecordVariable(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern); } @Override public void enterRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startRecordVariableReferenceList(); } @Override public void exitRecordRefBindingPattern(BallerinaParser.RecordRefBindingPatternContext ctx) { if (isInErrorState) { return; } boolean hasRestBindingPattern = 
ctx.entryRefBindingPattern().restRefBindingPattern() != null; this.pkgBuilder.addRecordVariableReference(getCurrentPos(ctx), getWS(ctx), hasRestBindingPattern); } @Override public void exitBindingPattern(BallerinaParser.BindingPatternContext ctx) { if (isInErrorState) { return; } if ((ctx.Identifier() != null) && ((ctx.parent instanceof BallerinaParser.ListBindingPatternContext) || (ctx.parent instanceof BallerinaParser.FieldBindingPatternContext) || (ctx.parent instanceof BallerinaParser.MatchPatternClauseContext))) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier())); } else if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternNameWhitespace(getWS(ctx)); } } @Override public void exitFieldBindingPattern(BallerinaParser.FieldBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), ctx.bindingPattern() != null); } @Override public void exitFieldRefBindingPattern(BallerinaParser.FieldRefBindingPatternContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFieldRefBindingMemberVar(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), ctx.bindingRefPattern() != null); } @Override public void exitRestBindingPattern(BallerinaParser.RestBindingPatternContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { this.pkgBuilder.addBindingPatternMemberVariable(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier())); } } @Override public void exitVariableDefinitionStatement(BallerinaParser.VariableDefinitionStatementContext ctx) { if (isInErrorState) { return; } boolean isFinal = ctx.FINAL() != null; boolean isDeclaredWithVar = ctx.VAR() != null; boolean isExpressionAvailable = ctx.expression() != null; if (ctx.Identifier() != null) { 
this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), getCurrentPos(ctx.Identifier()), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().Identifier() != null) { this.pkgBuilder.addSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), ctx.bindingPattern().Identifier().getText(), getCurrentPos(ctx.bindingPattern().Identifier()), isFinal, isExpressionAvailable, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().listBindingPattern() != null) { this.pkgBuilder.addTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isFinal, isDeclaredWithVar); } } @Override public void enterRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMapStructLiteral(); } @Override public void exitRecordLiteral(BallerinaParser.RecordLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addMapStructLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitRecordKeyValue(BallerinaParser.RecordKeyValueContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addKeyValueRecord(getWS(ctx), ctx.recordKey().LEFT_BRACKET() != null); } @Override public void exitRecordKey(BallerinaParser.RecordKeyContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.addNameReference(pos, getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(pos, getWS(ctx)); } else if (ctx.LEFT_BRACKET() != null) { 
this.pkgBuilder.addRecordKeyWS(getWS(ctx)); } } @Override public void enterTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTableLiteral(); } @Override public void exitTableColumnDefinition(BallerinaParser.TableColumnDefinitionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableColumnDefinition(getWS(ctx)); } @Override public void exitTableColumn(BallerinaParser.TableColumnContext ctx) { if (isInErrorState) { return; } String columnName; int childCount = ctx.getChildCount(); if (childCount == 2) { boolean keyColumn = KEYWORD_KEY.equals(ctx.getChild(0).getText()); if (keyColumn) { columnName = escapeQuotedIdentifier(ctx.getChild(1).getText()); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); this.pkgBuilder.markPrimaryKeyColumn(columnName); } else { DiagnosticPos pos = getCurrentPos(ctx); dlog.error(pos, DiagnosticCode.TABLE_KEY_EXPECTED); } } else { columnName = escapeQuotedIdentifier(ctx.getChild(0).getText()); this.pkgBuilder.addTableColumn(columnName, getCurrentPos(ctx), getWS(ctx)); } } @Override public void exitTableDataArray(BallerinaParser.TableDataArrayContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataArray(getWS(ctx)); } @Override public void exitTableDataList(BallerinaParser.TableDataListContext ctx) { if (isInErrorState) { return; } if (ctx.expressionList() != null) { this.pkgBuilder.endTableDataRow(getWS(ctx)); } } @Override public void exitTableData(BallerinaParser.TableDataContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTableDataList(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitTableLiteral(BallerinaParser.TableLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTableLiteral(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitListConstructorExpr(BallerinaParser.ListConstructorExprContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = 
/* Continuation of exitListConstructorExpr: argsAvailable is true when the list
   literal has at least one member expression. */
ctx.expressionList() != null; this.pkgBuilder.addListConstructorExpression(getCurrentPos(ctx), getWS(ctx), argsAvailable); }
/* "new T(args)" / "new(args)": type and argument list are both optional. */
@Override public void exitTypeInitExpr(BallerinaParser.TypeInitExprContext ctx) { if (isInErrorState) { return; } String initName = ctx.NEW().getText(); boolean typeAvailable = ctx.userDefineTypeName() != null; boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.addTypeInitExpression(getCurrentPos(ctx), getWS(ctx), initName, typeAvailable, argsAvailable); }
/* An anonymous service used as an expression: no variable name, and the variable
   position defaults to the service definition position. */
@Override public void exitServiceConstructorExpression(BallerinaParser.ServiceConstructorExpressionContext ctx) { if (isInErrorState) { return; } final DiagnosticPos serviceDefPos = getCurrentPos(ctx); final String serviceVarName = null; final DiagnosticPos varPos = serviceDefPos; this.pkgBuilder.endServiceDef(serviceDefPos, getWS(ctx), serviceVarName, varPos, true, ctx.serviceConstructorExpr().annotationAttachment().size()); }
/** * {@inheritDoc} */ @Override public void exitAssignmentStatement(BallerinaParser.AssignmentStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAssignmentStatement(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitListDestructuringStatement(BallerinaParser.ListDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTupleDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitRecordDestructuringStatement(BallerinaParser.RecordDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRecordDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitErrorDestructuringStatement(BallerinaParser.ErrorDestructuringStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addErrorDestructuringStatement(getCurrentPos(ctx), getWS(ctx)); }
/* Strips the trailing '=' from e.g. "+=" to recover the underlying operator. */
/** * {@inheritDoc} */ @Override public void exitCompoundAssignmentStatement(BallerinaParser.CompoundAssignmentStatementContext ctx) { if (isInErrorState) { return; } String compoundOperatorText = ctx.compoundOperator().getText(); String operator = compoundOperatorText.substring(0, compoundOperatorText.length() - 1); this.pkgBuilder.addCompoundAssignmentStatement(getCurrentPos(ctx), getWS(ctx), operator); }
/** * {@inheritDoc} */ @Override public void exitCompoundOperator(BallerinaParser.CompoundOperatorContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addCompoundOperator(getWS(ctx)); }
/* childCount/2 + 1 converts "expr (, expr)*" child counts into element counts. */
@Override public void enterVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); }
@Override public void exitVariableReferenceList(BallerinaParser.VariableReferenceListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); }
/* if/else-if/else: enterIfElseStatement and enterElseIfClause both open a fresh
   if-else node; the exit callbacks attach the corresponding block. */
/** * {@inheritDoc} */ @Override public void enterIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); }
/** * {@inheritDoc} */ @Override public void exitIfElseStatement(BallerinaParser.IfElseStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endIfElseNode(getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitIfClause(BallerinaParser.IfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIfBlock(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void enterElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startIfElseNode(getCurrentPos(ctx)); }
/** * {@inheritDoc} */ @Override public void exitElseIfClause(BallerinaParser.ElseIfClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseIfBlock(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void enterElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startBlock(); }
/** * {@inheritDoc} */
@Override public void exitElseClause(BallerinaParser.ElseClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addElseBlock(getCurrentPos(ctx), getWS(ctx)); }
/* match statement: node is created on enter and completed on exit; each pattern
   clause is either a structured binding pattern (possibly with an "if" type
   guard) or a static value pattern. */
@Override public void enterMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createMatchNode(getCurrentPos(ctx)); }
@Override public void exitMatchStatement(BallerinaParser.MatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.completeMatchNode(getCurrentPos(ctx), getWS(ctx)); }
@Override public void enterMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startMatchStmtPattern(); }
@Override public void exitMatchPatternClause(BallerinaParser.MatchPatternClauseContext ctx) { if (isInErrorState) { return; } if (ctx.bindingPattern() != null || ctx.errorMatchPattern() != null) { boolean isTypeGuardPresent = ctx.IF() != null; this.pkgBuilder.addMatchStmtStructuredBindingPattern(getCurrentPos(ctx), getWS(ctx), isTypeGuardPresent); return; } this.pkgBuilder.addMatchStmtStaticBindingPattern(getCurrentPos(ctx), getWS(ctx)); }
/* foreach: same binding-pattern dispatch as variable definitions — simple
   identifier, record, error, or tuple pattern. */
@Override public void enterForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForeachStatement(); }
@Override public void exitForeachStatement(BallerinaParser.ForeachStatementContext ctx) { if (isInErrorState) { return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { String identifier = ctx.bindingPattern().Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier()); this.pkgBuilder.addForeachStatementWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), identifier, identifierPos, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.addForeachStatementWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.addForeachStatementWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } }
/* The boolean flags report which optional parentheses/end expression are absent. */
@Override public void exitIntRangeExpression(BallerinaParser.IntRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addIntRangeExpression(getCurrentPos(ctx), getWS(ctx), ctx.LEFT_PARENTHESIS() == null, ctx.RIGHT_PARENTHESIS() == null, ctx.expression(1) == null); }
/** * {@inheritDoc} */ @Override public void enterWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWhileStmt(); }
/** * {@inheritDoc} */ @Override public void exitWhileStatement(BallerinaParser.WhileStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addWhileStmt(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitContinueStatement(BallerinaParser.ContinueStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addContinueStatement(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitBreakStatement(BallerinaParser.BreakStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addBreakStatement(getCurrentPos(ctx), getWS(ctx)); }
@Override public void enterForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startForkJoinStmt(); }
@Override public void exitForkJoinStatement(BallerinaParser.ForkJoinStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addForkJoinStmt(getCurrentPos(ctx), getWS(ctx)); }
@Override public void
/* try/catch/finally: enter callbacks open the corresponding builder scope, exit
   callbacks attach the completed clause. */
enterTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTryCatchFinallyStmt(); }
@Override public void exitTryCatchStatement(BallerinaParser.TryCatchStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryCatchFinallyStmt(getCurrentPos(ctx), getWS(ctx)); }
@Override public void enterCatchClauses(BallerinaParser.CatchClausesContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTryClause(getCurrentPos(ctx)); }
@Override public void enterCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCatchClause(); }
@Override public void exitCatchClause(BallerinaParser.CatchClauseContext ctx) { if (isInErrorState) { return; } String paramName = ctx.Identifier().getText(); this.pkgBuilder.addCatchClause(getCurrentPos(ctx), getWS(ctx), paramName); }
@Override public void enterFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startFinallyBlock(); }
@Override public void exitFinallyClause(BallerinaParser.FinallyClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addFinallyBlock(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitThrowStatement(BallerinaParser.ThrowStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addThrowStmt(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitPanicStatement(BallerinaParser.PanicStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addPanicStmt(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitReturnStatement(BallerinaParser.ReturnStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnStatement(this.getCurrentPos(ctx), getWS(ctx), ctx.expression() != null); }
/* Worker interactions: the peer is either the "default" worker keyword or a
   named worker. */
@Override public void exitWorkerReceiveExpression(BallerinaParser.WorkerReceiveExpressionContext ctx) { if (isInErrorState) { return; } String workerName = ctx.peerWorker().DEFAULT() != null ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText(); this.pkgBuilder.addWorkerReceiveExpr(getCurrentPos(ctx), getWS(ctx), workerName, ctx.expression() != null); }
@Override public void exitFlushWorker(BallerinaParser.FlushWorkerContext ctx) { if (isInErrorState) { return; } String workerName = ctx.Identifier() != null ? ctx.Identifier().getText() : null; this.pkgBuilder.addWorkerFlushExpr(getCurrentPos(ctx), getWS(ctx), workerName); }
/* expression().size() > 1 means a key expression accompanies the sent value. */
@Override public void exitWorkerSendAsyncStatement(BallerinaParser.WorkerSendAsyncStatementContext ctx) { if (isInErrorState) { return; } String workerName = ctx.peerWorker().DEFAULT() != null ? ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText(); this.pkgBuilder.addWorkerSendStmt(getCurrentPos(ctx), getWS(ctx), workerName, ctx.expression().size() > 1); }
@Override public void exitWorkerSendSyncExpression(BallerinaParser.WorkerSendSyncExpressionContext ctx) { if (isInErrorState) { return; } String workerName = ctx.peerWorker().DEFAULT() != null ?
/* Continuation of exitWorkerSendSyncExpression: resolve peer worker name. */
ctx.peerWorker().DEFAULT().getText() : ctx.peerWorker().workerName().getText(); this.pkgBuilder.addWorkerSendSyncExpr(getCurrentPos(ctx), getWS(ctx), workerName); }
/* wait {...} (wait-for-all) versus wait expr (single/alternate wait). */
@Override public void exitWaitExpression(BallerinaParser.WaitExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.waitForCollection() != null) { this.pkgBuilder.handleWaitForAll(getCurrentPos(ctx), getWS(ctx)); } else { this.pkgBuilder.handleWait(getCurrentPos(ctx), getWS(ctx)); } }
@Override public void enterWaitForCollection(BallerinaParser.WaitForCollectionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startWaitForAll(); }
/* A wait key without an expression ("{a}") is shorthand for "{a: a}". */
@Override public void exitWaitKeyValue(BallerinaParser.WaitKeyValueContext ctx) { if (isInErrorState) { return; } boolean containsExpr = ctx.expression() != null; this.pkgBuilder.addKeyValueToWaitForAll(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText(), containsExpr); }
/** * {@inheritDoc} */ @Override public void exitXmlAttribVariableReference(BallerinaParser.XmlAttribVariableReferenceContext ctx) { if (isInErrorState) { return; } boolean isSingleAttrRef = ctx.xmlAttrib().expression() != null; this.pkgBuilder.createXmlAttributesRefExpr(getCurrentPos(ctx), getWS(ctx), isSingleAttrRef); }
@Override public void exitSimpleVariableReference(BallerinaParser.SimpleVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); }
/* "literal".method(...): first pushes the quoted string (quotes stripped, Java
   escapes decoded) as a string literal, then wraps it in an invocation node. */
@Override public void exitStringFunctionInvocationReference(BallerinaParser.StringFunctionInvocationReferenceContext ctx) { if (isInErrorState) { return; } TerminalNode node = ctx.QuotedStringLiteral(); DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String actualText = node.getText(); actualText = actualText.substring(1, actualText.length() - 1); actualText = StringEscapeUtils.unescapeJava(actualText); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText()); boolean argsAvailable = ctx.invocation().invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName(); String invocation = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, getCurrentPos(identifierContext)); }
/* ("literal").method(...): same as above, plus a wrapping group expression.
   NOTE(review): the group expression uses the string literal's position
   (getCurrentPos(node)) rather than the whole parenthesized span — confirm this
   is intentional for diagnostics. */
@Override public void exitGroupStringFunctionInvocationReference(GroupStringFunctionInvocationReferenceContext ctx) { if (isInErrorState) { return; } TerminalNode node = ctx.QuotedStringLiteral(); DiagnosticPos pos = getCurrentPos(ctx); Set<Whitespace> ws = getWS(ctx); String actualText = node.getText(); actualText = actualText.substring(1, actualText.length() - 1); actualText = StringEscapeUtils.unescapeJava(actualText); this.pkgBuilder.addLiteralValue(pos, ws, TypeTags.STRING, actualText, node.getText()); InvocationContext invocation = ctx.invocation(); boolean argsAvailable = invocation.invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName(); String invocationText = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText, argsAvailable, getCurrentPos(identifierContext)); this.pkgBuilder.createGroupExpression(getCurrentPos(node), getWS(ctx)); }
@Override public void exitFunctionInvocation(BallerinaParser.FunctionInvocationContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocationArgList() != null; this.pkgBuilder.createFunctionInvocation(getCurrentPos(ctx), getWS(ctx), argsAvailable); }
@Override public void exitFieldVariableReference(BallerinaParser.FieldVariableReferenceContext ctx) { if (isInErrorState) { return; } createFieldBasedAccessNode(ctx, ctx.field()); }
@Override public void exitGroupFieldVariableReference(BallerinaParser.GroupFieldVariableReferenceContext ctx) { if (isInErrorState) { return; } FieldContext field = ctx.field(); VariableReferenceContext groupExpression =
ctx.variableReference(); createFieldBasedAccessNode(field, field); this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression)); } private void createFieldBasedAccessNode(ParserRuleContext ctx, FieldContext field) { String fieldName; DiagnosticPos fieldNamePos; FieldKind fieldType; if (field.Identifier() != null) { fieldName = field.Identifier().getText(); fieldNamePos = getCurrentPos(field); fieldType = FieldKind.SINGLE; } else { fieldName = field.MUL().getText(); fieldNamePos = getCurrentPos(field); fieldType = FieldKind.ALL; } this.pkgBuilder.createFieldBasedAccessNode(getCurrentPos(ctx), getWS(ctx), fieldName, fieldNamePos, fieldType, field.OPTIONAL_FIELD_ACCESS() != null); } @Override public void exitMapArrayVariableReference(BallerinaParser.MapArrayVariableReferenceContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitGroupMapArrayVariableReference(BallerinaParser.GroupMapArrayVariableReferenceContext ctx) { if (isInErrorState) { return; } IndexContext index = ctx.index(); VariableReferenceContext groupExpression = ctx.variableReference(); this.pkgBuilder.createIndexBasedAccessNode(getCurrentPos(index), getWS(index)); this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression)); } @Override public void exitReservedWord(BallerinaParser.ReservedWordContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startInvocationNode(getWS(ctx)); } @Override public void exitAnyIdentifierName(BallerinaParser.AnyIdentifierNameContext ctx) { if (isInErrorState) { return; } if (ctx.reservedWord() == null) { this.pkgBuilder.startInvocationNode(getWS(ctx)); } } @Override public void exitInvocationReference(BallerinaParser.InvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext 
identifierContext = ctx.invocation().anyIdentifierName(); String invocation = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, getCurrentPos(identifierContext)); } @Override public void exitGroupInvocationReference(BallerinaParser.GroupInvocationReferenceContext ctx) { if (isInErrorState) { return; } InvocationContext invocation = ctx.invocation(); VariableReferenceContext groupExpression = ctx.variableReference(); boolean argsAvailable = invocation.invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = invocation.anyIdentifierName(); String invocationText = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(invocation), getWS(invocation), invocationText, argsAvailable, getCurrentPos(identifierContext)); this.pkgBuilder.createGroupExpression(getCurrentPos(groupExpression), getWS(groupExpression)); } @Override public void exitTypeDescExprInvocationReference(BallerinaParser.TypeDescExprInvocationReferenceContext ctx) { if (isInErrorState) { return; } boolean argsAvailable = ctx.invocation().invocationArgList() != null; BallerinaParser.AnyIdentifierNameContext identifierContext = ctx.invocation().anyIdentifierName(); String invocation = identifierContext.getText(); this.pkgBuilder.createInvocationNode(getCurrentPos(ctx), getWS(ctx), invocation, argsAvailable, getCurrentPos(identifierContext)); } /** * {@inheritDoc} */ @Override public void enterInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startExprNodeList(); } /** * {@inheritDoc} */ @Override public void exitInvocationArgList(BallerinaParser.InvocationArgListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); } public void enterExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } 
/* Continuation of enterExpressionList: open a fresh expression-node list. */
this.pkgBuilder.startExprNodeList(); }
@Override public void exitExpressionList(BallerinaParser.ExpressionListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endExprNodeList(getWS(ctx), ctx.getChildCount() / 2 + 1); }
@Override public void exitExpressionStmt(BallerinaParser.ExpressionStmtContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addExpressionStmt(getCurrentPos(ctx), getWS(ctx)); }
/* transaction statement and its clause blocks (onretry/committed/aborted). */
/** * {@inheritDoc} */ @Override public void enterTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startTransactionStmt(); }
/** * {@inheritDoc} */ @Override public void exitTransactionStatement(BallerinaParser.TransactionStatementContext ctx) { if (isInErrorState) { return; } DiagnosticPos pos = getCurrentPos(ctx); this.pkgBuilder.endTransactionStmt(pos, getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitTransactionClause(BallerinaParser.TransactionClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addTransactionBlock(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitTransactionPropertyInitStatementList( BallerinaParser.TransactionPropertyInitStatementListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endTransactionPropertyInitStatementList(getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void enterLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startLockStmt(); }
/** * {@inheritDoc} */ @Override public void exitLockStatement(BallerinaParser.LockStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addLockStmt(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void enterOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startOnretryBlock(); }
/** * {@inheritDoc} */ @Override public void exitOnretryClause(BallerinaParser.OnretryClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addOnretryBlock(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void enterCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startCommittedBlock(); }
/** * {@inheritDoc} */ @Override public void exitCommittedClause(BallerinaParser.CommittedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endCommittedBlock(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void enterAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startAbortedBlock(); }
/** * {@inheritDoc} */ @Override public void exitAbortedClause(BallerinaParser.AbortedClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endAbortedBlock(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitAbortStatement(BallerinaParser.AbortStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addAbortStatement(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitRetryStatement(BallerinaParser.RetryStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryStatement(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitRetriesStatement(BallerinaParser.RetriesStatementContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRetryCountExpression(getWS(ctx)); }
/* Intentionally empty: no builder state is needed on entering an xmlns decl. */
/** * {@inheritDoc} */ @Override public void enterNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { }
/* xmlns "uri" [as prefix]; the quoted URI is unquoted/unescaped on the next line. */
@Override public void exitNamespaceDeclaration(BallerinaParser.NamespaceDeclarationContext ctx) { if (isInErrorState) { return; } boolean isTopLevel = ctx.parent instanceof BallerinaParser.CompilationUnitContext; String namespaceUri = ctx.QuotedStringLiteral().getText(); DiagnosticPos pos = getCurrentPos(ctx); namespaceUri = namespaceUri.substring(1,
/* Continuation of exitNamespaceDeclaration: strip surrounding quotes, decode
   Java escapes, and register the xmlns declaration (prefix optional). */
namespaceUri.length() - 1); namespaceUri = StringEscapeUtils.unescapeJava(namespaceUri); String prefix = (ctx.Identifier() != null) ? ctx.Identifier().getText() : null; DiagnosticPos prefixPos = (ctx.Identifier() != null) ? getCurrentPos(ctx.Identifier()) : null; this.pkgBuilder.addXMLNSDeclaration(pos, getWS(ctx), namespaceUri, prefix, prefixPos, isTopLevel); }
/* Binary expressions: the operator token is always child 1 of "lhs op rhs". */
@Override public void exitBinaryDivMulModExpression(BallerinaParser.BinaryDivMulModExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitBinaryOrExpression(BallerinaParser.BinaryOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitBinaryRefEqualExpression(BallerinaParser.BinaryRefEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitBinaryEqualExpression(BallerinaParser.BinaryEqualExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitStaticMatchOrExpression(BallerinaParser.StaticMatchOrExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
/* An identifier in a static match pattern is modelled as a simple var reference. */
@Override public void exitStaticMatchIdentifierLiteral(BallerinaParser.StaticMatchIdentifierLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, ctx.Identifier().getText()); this.pkgBuilder.createSimpleVariableReference(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitTypeDescExpr(BallerinaParser.TypeDescExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeAccessExpr(getCurrentPos(ctx), getWS(ctx)); }
/* "start" marks an async action invocation. */
@Override public void exitActionInvocation(BallerinaParser.ActionInvocationContext ctx) { if (isInErrorState) { return; } int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.createActionInvocationNode(getCurrentPos(ctx), getWS(ctx), ctx.START() != null, numAnnotations); }
@Override public void exitBinaryAndExpression(BallerinaParser.BinaryAndExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitBinaryAddSubExpression(BallerinaParser.BinaryAddSubExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitBitwiseExpression(BallerinaParser.BitwiseExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
/* Shift operators arrive as separate ">"/">"/"=" tokens, so every token between
   the operands is concatenated to reassemble ">>", ">>>", "<<=", etc. */
@Override public void exitBitwiseShiftExpression(BallerinaParser.BitwiseShiftExpressionContext ctx) { if (isInErrorState) { return; } StringBuilder operator = new StringBuilder(); for (int i = 1; i < ctx.getChildCount() - 1; i++) { operator.append(ctx.getChild(i).getText()); } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), operator.toString()); }
/** * {@inheritDoc} */ @Override public void exitTypeConversionExpression(BallerinaParser.TypeConversionExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeConversionExpr(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size(), ctx.typeName() != null); }
@Override public void exitBinaryCompareExpression(BallerinaParser.BinaryCompareExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitIntegerRangeExpression(BallerinaParser.IntegerRangeExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createBinaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(1).getText()); }
@Override public void exitUnaryExpression(BallerinaParser.UnaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createUnaryExpr(getCurrentPos(ctx), getWS(ctx), ctx.getChild(0).getText()); }
@Override public void exitTypeTestExpression(BallerinaParser.TypeTestExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTypeTestExpression(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitAnnotAccessExpression(BallerinaParser.AnnotAccessExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createAnnotAccessNode(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitGroupExpression(BallerinaParser.GroupExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createGroupExpression(getCurrentPos(ctx), getWS(ctx)); }
/** * {@inheritDoc} */ @Override public void exitTernaryExpression(BallerinaParser.TernaryExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTernaryExpr(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitCheckedExpression(BallerinaParser.CheckedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckedExpr(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitCheckPanickedExpression(BallerinaParser.CheckPanickedExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createCheckPanickedExpr(getCurrentPos(ctx), getWS(ctx)); }
/* Query "from" clause: same binding-pattern dispatch as foreach; continues below. */
@Override public void exitFromClause(BallerinaParser.FromClauseContext ctx) { if (isInErrorState) { return; } boolean isDeclaredWithVar = ctx.VAR() != null; if (ctx.bindingPattern().Identifier() != null) { String identifier = ctx.bindingPattern().Identifier().getText(); DiagnosticPos identifierPos = getCurrentPos(ctx.bindingPattern().Identifier());
/* Continuation of exitFromClause: route to the pkgBuilder variant matching the
   binding-pattern kind (simple, record, error, or tuple). */
this.pkgBuilder.createFromClauseWithSimpleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), identifier, identifierPos, isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().recordBindingPattern() != null) { this.pkgBuilder.createFromClauseWithRecordVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else if (ctx.bindingPattern().structuredBindingPattern().errorBindingPattern() != null) { this.pkgBuilder.createFromClauseWithErrorVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } else { this.pkgBuilder.createFromClauseWithTupleVariableDefStatement(getCurrentPos(ctx), getWS(ctx), isDeclaredWithVar); } }
@Override public void exitWhereClause(BallerinaParser.WhereClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createWhereClause(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitSelectClause(BallerinaParser.SelectClauseContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createSelectClause(getCurrentPos(ctx), getWS(ctx)); }
@Override public void exitQueryExpr(BallerinaParser.QueryExprContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createQueryExpr(getCurrentPos(ctx), getWS(ctx)); }
/* Name references: "pkg:name" (two identifiers) or a bare "name". Using "_" as
   the package qualifier is rejected. */
@Override public void exitNameReference(BallerinaParser.NameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier().size() == 2) { String pkgName = ctx.Identifier(0).getText(); String name = ctx.Identifier(1).getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.Identifier(0).getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } }
@Override public void exitFunctionNameReference(BallerinaParser.FunctionNameReferenceContext ctx) { if (isInErrorState) { return; } if (ctx.Identifier() != null) { String pkgName = ctx.Identifier().getText(); String name = ctx.anyIdentifierName().getText(); DiagnosticPos pos = getCurrentPos(ctx); if (Names.IGNORE.value.equals(pkgName)) { dlog.error(pos, DiagnosticCode.INVALID_PACKAGE_NAME_QUALIFER, pkgName); } this.pkgBuilder.addNameReference(pos, getWS(ctx), pkgName, name); } else { String name = ctx.anyIdentifierName().getText(); this.pkgBuilder.addNameReference(getCurrentPos(ctx), getWS(ctx), null, name); } }
/** * {@inheritDoc} */ @Override public void exitReturnParameter(BallerinaParser.ReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); }
@Override public void exitLambdaReturnParameter(BallerinaParser.LambdaReturnParameterContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addReturnParam(getCurrentPos(ctx), getWS(ctx), ctx.annotationAttachment().size()); }
@Override public void enterParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.startVarList(); }
/* A parameter list inside a function-type signature (directly, or via its return
   parameter) is closed as a function-type param list; otherwise as a callable
   param list. */
/** * {@inheritDoc} */ @Override public void exitParameterTypeNameList(BallerinaParser.ParameterTypeNameListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } }
/** * {@inheritDoc} */ @Override public void exitParameterList(BallerinaParser.ParameterListContext ctx) { if (isInErrorState) { return; } ParserRuleContext parent = ctx.getParent(); boolean inFuncTypeSig = parent instanceof BallerinaParser.FunctionTypeNameContext || parent instanceof BallerinaParser.ReturnParameterContext && parent.parent instanceof
BallerinaParser.FunctionTypeNameContext; if (inFuncTypeSig) { this.pkgBuilder.endFuncTypeParamList(getWS(ctx)); } else { this.pkgBuilder.endCallableParamList(getWS(ctx)); } } private String fillWithZeros(String str) { while (str.length() < 4) { str = "0".concat(str); } return str; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void exitNamedArgs(BallerinaParser.NamedArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addNamedArgument(getCurrentPos(ctx), getWS(ctx), ctx.Identifier().getText()); } /** * {@inheritDoc} */ @Override public void exitRestArgs(BallerinaParser.RestArgsContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.addRestArgument(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitXmlLiteral(BallerinaParser.XmlLiteralContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.attachXmlLiteralWS(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitComment(BallerinaParser.CommentContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLCommentTemplateText()); String endingString = getTemplateEndingStr(ctx.XMLCommentText()); this.pkgBuilder.createXMLCommentLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitElement(BallerinaParser.ElementContext ctx) { if (isInErrorState) { return; } if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitStartTag(BallerinaParser.StartTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void 
exitCloseTag(BallerinaParser.CloseTagContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endXMLElement(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitEmptyTag(BallerinaParser.EmptyTagContext ctx) { if (isInErrorState) { return; } boolean isRoot = ctx.parent.parent instanceof BallerinaParser.XmlItemContext; this.pkgBuilder.startXMLElement(getCurrentPos(ctx), getWS(ctx), isRoot); } /** * {@inheritDoc} */ @Override public void exitProcIns(BallerinaParser.ProcInsContext ctx) { if (isInErrorState) { return; } String targetQName = ctx.XML_TAG_SPECIAL_OPEN().getText(); targetQName = targetQName.substring(2, targetQName.length() - 1); Stack<String> textFragments = getTemplateTextFragments(ctx.XMLPITemplateText()); String endingText = getTemplateEndingStr(ctx.XMLPIText()); endingText = endingText.substring(0, endingText.length() - 2); this.pkgBuilder.createXMLPILiteral(getCurrentPos(ctx), getWS(ctx), targetQName, textFragments, endingText); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addChildToXMLElement(getWS(ctx)); } } /** * {@inheritDoc} */ @Override public void exitAttribute(BallerinaParser.AttributeContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createXMLAttribute(getCurrentPos(ctx), getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitText(BallerinaParser.TextContext ctx) { if (isInErrorState) { return; } Stack<String> textFragments = getTemplateTextFragments(ctx.XMLTemplateText()); String endingText = getTemplateEndingStr(ctx.XMLText()); if (ctx.getParent() instanceof BallerinaParser.ContentContext) { this.pkgBuilder.addXMLTextToElement(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } else { this.pkgBuilder.createXMLTextLiteral(getCurrentPos(ctx), getWS(ctx), textFragments, endingText); } } /** * {@inheritDoc} */ @Override public void exitXmlSingleQuotedString(BallerinaParser.XmlSingleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> 
stringFragments = getTemplateTextFragments(ctx.XMLSingleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLSingleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.SINGLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlDoubleQuotedString(BallerinaParser.XmlDoubleQuotedStringContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments = getTemplateTextFragments(ctx.XMLDoubleQuotedTemplateString()); String endingString = getTemplateEndingStr(ctx.XMLDoubleQuotedString()); this.pkgBuilder.createXMLQuotedLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingString, QuoteType.DOUBLE_QUOTE); } /** * {@inheritDoc} */ @Override public void exitXmlQualifiedName(BallerinaParser.XmlQualifiedNameContext ctx) { if (isInErrorState) { return; } List<TerminalNode> qnames = ctx.XMLQName(); String prefix = null; String localname; if (qnames.size() > 1) { prefix = qnames.get(0).getText(); localname = qnames.get(1).getText(); } else { localname = qnames.get(0).getText(); } this.pkgBuilder.createXMLQName(getCurrentPos(ctx), getWS(ctx), localname, prefix); } /** * {@inheritDoc} */ @Override public void exitStringTemplateLiteral(BallerinaParser.StringTemplateLiteralContext ctx) { if (isInErrorState) { return; } Stack<String> stringFragments; String endingText = null; StringTemplateContentContext contentContext = ctx.stringTemplateContent(); if (contentContext != null) { stringFragments = getTemplateTextFragments(contentContext.StringTemplateExpressionStart()); endingText = getTemplateEndingStr(contentContext.StringTemplateText()); } else { stringFragments = new Stack<>(); } this.pkgBuilder.createStringTemplateLiteral(getCurrentPos(ctx), getWS(ctx), stringFragments, endingText); } /** * {@inheritDoc} */ @Override public void enterDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } 
this.pkgBuilder.startMarkdownDocumentationString(getCurrentPos(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationString(BallerinaParser.DocumentationStringContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkdownDocumentationString(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationLine(BallerinaParser.DocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endMarkDownDocumentLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitDocumentationContent(BallerinaParser.DocumentationContentContext ctx) { if (isInErrorState) { return; } String text = ctx.getText() != null ? ctx.getText() : ""; this.pkgBuilder.endMarkdownDocumentationText(getCurrentPos(ctx), getWS(ctx), text); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentationLine(BallerinaParser.ParameterDocumentationLineContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.endParameterDocumentationLine(getWS(ctx)); } /** * {@inheritDoc} */ @Override public void exitParameterDocumentation(BallerinaParser.ParameterDocumentationContext ctx) { if (isInErrorState) { return; } String parameterName = ctx.docParameterName() != null ? ctx.docParameterName().getText() : ""; String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentation(getCurrentPos(ctx.docParameterName()), getWS(ctx), parameterName, description); } /** * {@inheritDoc} */ @Override public void exitParameterDescriptionLine(BallerinaParser.ParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? 
ctx.documentationText().getText() : ""; this.pkgBuilder.endParameterDocumentationDescription(getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDocumentation(BallerinaParser.ReturnParameterDocumentationContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentation(getCurrentPos(ctx.getParent()), getWS(ctx), description); } /** * {@inheritDoc} */ @Override public void exitReturnParameterDescriptionLine(BallerinaParser.ReturnParameterDescriptionLineContext ctx) { if (isInErrorState) { return; } String description = ctx.documentationText() != null ? ctx.documentationText().getText() : ""; this.pkgBuilder.endReturnParameterDocumentationDescription(getWS(ctx), description); } @Override public void exitTrapExpression(BallerinaParser.TrapExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createTrapExpr(getCurrentPos(ctx), getWS(ctx)); } @Override public void exitVariableReferenceExpression(BallerinaParser.VariableReferenceExpressionContext ctx) { if (isInErrorState) { return; } if (ctx.START() != null) { int numAnnotations = ctx.annotationAttachment().size(); this.pkgBuilder.markLastInvocationAsAsync(getCurrentPos(ctx), numAnnotations); } } @Override public void exitDocumentationReference(BallerinaParser.DocumentationReferenceContext ctx) { if (isInErrorState) { return; } BallerinaParser.ReferenceTypeContext referenceType = ctx.referenceType(); BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); this.pkgBuilder.endDocumentationReference(getCurrentPos(ctx), referenceType.getText(), backtickedContent.getText()); } @Override public void exitSingleBacktickedBlock(BallerinaParser.SingleBacktickedBlockContext ctx) { if (isInErrorState) { return; } BallerinaParser.SingleBacktickedContentContext backtickedContent = ctx.singleBacktickedContent(); 
this.pkgBuilder.endSingleBacktickedBlock(getCurrentPos(ctx), backtickedContent.getText()); } /** * {@inheritDoc} */ @Override public void exitElvisExpression(BallerinaParser.ElvisExpressionContext ctx) { if (isInErrorState) { return; } this.pkgBuilder.createElvisExpr(getCurrentPos(ctx), getWS(ctx)); } private DiagnosticPos getCurrentPos(ParserRuleContext ctx) { int startLine = ctx.getStart().getLine(); int startCol = ctx.getStart().getCharPositionInLine() + 1; int endLine = -1; int endCol = -1; Token stop = ctx.getStop(); if (stop != null) { endLine = stop.getLine(); endCol = stop.getCharPositionInLine() + (stop.getStopIndex() - stop.getStartIndex() + 1) + 1; } return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } private DiagnosticPos getCurrentPos(TerminalNode node) { Token symbol = node.getSymbol(); int startLine = symbol.getLine(); int startCol = symbol.getCharPositionInLine() + 1; int endLine = startLine; int endCol = startCol + symbol.getText().length(); return new DiagnosticPos(diagnosticSrc, startLine, endLine, startCol, endCol); } protected Set<Whitespace> getWS(ParserRuleContext ctx) { return null; } private Stack<String> getTemplateTextFragments(List<TerminalNode> nodes) { Stack<String> templateStrFragments = new Stack<>(); nodes.forEach(node -> { if (node == null) { templateStrFragments.push(null); } else { String str = node.getText(); templateStrFragments.push(str.substring(0, str.length() - 2)); } }); return templateStrFragments; } private String getTemplateEndingStr(TerminalNode node) { return node == null ? 
null : node.getText(); } private String getTemplateEndingStr(List<TerminalNode> nodes) { StringJoiner joiner = new StringJoiner(""); nodes.forEach(node -> joiner.add(node.getText())); return joiner.toString(); } private String getNodeValue(ParserRuleContext ctx, TerminalNode node) { String op = ctx.getChild(0).getText(); String value = node.getText(); if (op != null && "-".equals(op)) { value = "-" + value; } return value; } private String getHexNodeValue(ParserRuleContext ctx, TerminalNode node) { String value = getNodeValue(ctx, node); if (!(value.contains("p") || value.contains("P"))) { value = value + "p0"; } return value; } private Object getIntegerLiteral(ParserRuleContext simpleLiteralContext, BallerinaParser.IntegerLiteralContext integerLiteralContext) { if (integerLiteralContext.DecimalIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.DecimalIntegerLiteral()); return parseLong(simpleLiteralContext, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL, DiagnosticCode.INTEGER_TOO_LARGE); } else if (integerLiteralContext.HexIntegerLiteral() != null) { String nodeValue = getNodeValue(simpleLiteralContext, integerLiteralContext.HexIntegerLiteral()); String processedNodeValue = nodeValue.toLowerCase().replace("0x", ""); return parseLong(simpleLiteralContext, nodeValue, processedNodeValue, 16, DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE); } return null; } private Object parseLong(ParserRuleContext context, String originalNodeValue, String processedNodeValue, int radix, DiagnosticCode code1, DiagnosticCode code2) { try { return Long.parseLong(processedNodeValue, radix); } catch (Exception e) { DiagnosticPos pos = getCurrentPos(context); Set<Whitespace> ws = getWS(context); if (originalNodeValue.startsWith("-")) { dlog.error(pos, code1, originalNodeValue); } else { dlog.error(pos, code2, originalNodeValue); } } return originalNodeValue; } /** * Mark that this listener is in 
error state. */ public void setErrorState() { this.isInErrorState = true; } /** * Mark that this listener is not in an error state. */ public void unsetErrorState() { this.isInErrorState = false; } boolean isInErrorState() { return this.isInErrorState; } }
Yes — if 'use v2 rollup' is true, only the base index will be selected.
public BestIndexInfo selectBestMV(ScanNode scanNode) throws UserException { long start = System.currentTimeMillis(); Preconditions.checkState(scanNode instanceof OlapScanNode); OlapScanNode olapScanNode = (OlapScanNode) scanNode; ConnectContext connectContext = ConnectContext.get(); if (connectContext != null && connectContext.getSessionVariable().isUseV2Rollup()) { OlapTable tbl = olapScanNode.getOlapTable(); String v2RollupIndexName = MaterializedViewHandler.NEW_STORAGE_FORMAT_INDEX_NAME_PREFIX + tbl.getName(); Long v2RollupIndexId = tbl.getIndexIdByName(v2RollupIndexName); if (v2RollupIndexId != null) { return new BestIndexInfo(v2RollupIndexId, false, "use_v2_rollup is true"); } } Map<Long, List<Column>> candidateIndexIdToSchema = predicates(olapScanNode); long bestIndexId = priorities(olapScanNode, candidateIndexIdToSchema); LOG.debug("The best materialized view is {} for scan node {} in query {}, cost {}", bestIndexId, scanNode.getId(), selectStmt.toSql(), (System.currentTimeMillis() - start)); return new BestIndexInfo(bestIndexId, isPreAggregation, reasonOfDisable); }
if (connectContext != null && connectContext.getSessionVariable().isUseV2Rollup()) {
/**
 * Picks the materialized index (rollup) that best serves the given scan node.
 * Candidates are first filtered by the query shape ({@link #predicates}) and
 * the survivors are then ranked by prefix-index match and row count
 * ({@link #priorities}).
 *
 * @param scanNode must be an {@link OlapScanNode}; enforced by a precondition
 * @return the chosen index id plus the pre-aggregation flag and, when
 *         pre-aggregation is disabled, the reason why
 * @throws UserException propagated from candidate selection
 */
public BestIndexInfo selectBestMV(ScanNode scanNode) throws UserException {
    long beginMs = System.currentTimeMillis();
    Preconditions.checkState(scanNode instanceof OlapScanNode);
    OlapScanNode olapNode = (OlapScanNode) scanNode;
    Map<Long, List<Column>> candidates = predicates(olapNode);
    long chosenIndexId = priorities(olapNode, candidates);
    LOG.debug("The best materialized view is {} for scan node {} in query {}, cost {}",
            chosenIndexId, scanNode.getId(), selectStmt.toSql(), (System.currentTimeMillis() - beginMs));
    return new BestIndexInfo(chosenIndexId, isPreAggregation, reasonOfDisable);
}
// Chooses the best materialized-view (rollup) index for an OLAP scan in two phases:
// phase 1 filters the table's visible indexes against the query's predicate, grouping,
// aggregation and output columns; phase 2 ranks the survivors by prefix-index match
// and row count. State collected in init() drives the filtering.
class MaterializedViewSelector {
    private static final Logger LOG = LogManager.getLogger(MaterializedViewSelector.class);

    private final SelectStmt selectStmt;
    private final Analyzer analyzer;

    /**
     * The key of following maps is table name.
     * The value of following maps is column names.
     * `columnNamesInPredicates` means that the column names in where clause.
     * And so on.
     */
    private Map<String, Set<String>> columnNamesInPredicates = Maps.newHashMap();
    // True when the query has no aggregation info (plain select-project-join).
    private boolean isSPJQuery;
    private Map<String, Set<String>> columnNamesInGrouping = Maps.newHashMap();
    private Map<String, Set<AggregatedColumn>> aggregateColumnsInQuery = Maps.newHashMap();
    private Map<String, Set<String>> columnNamesInQueryOutput = Maps.newHashMap();
    // Set by init() when an aggregate expression cannot be matched against an MV.
    private boolean disableSPJGView;
    private String reasonOfDisable;
    private boolean isPreAggregation = true;

    public MaterializedViewSelector(SelectStmt selectStmt, Analyzer analyzer) {
        this.selectStmt = selectStmt;
        this.analyzer = analyzer;
        init();
    }

    /**
     * There are two stages to choosing the best MV.
     * Phase 1: Predicates = computeCandidateMVs
     * According to aggregation and column information in the select stmt,
     * the candidate MVs that meets the query conditions are selected.
     * Phase 2: Priorities = computeBestMVByCost
     * According to prefix index and row count in candidate MVs,
     * the best MV is selected.
     *
     * @param scanNode the OLAP scan whose visible indexes are filtered
     * @return surviving candidate index ids mapped to their schemas
     */
    private Map<Long, List<Column>> predicates(OlapScanNode scanNode) {
        Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta = scanNode.getOlapTable().getVisibleIndexIdToMeta();
        OlapTable table = scanNode.getOlapTable();
        Preconditions.checkState(table != null);
        String tableName = table.getName();
        // Each check prunes candidateIndexIdToMeta in place.
        checkCompensatingPredicates(columnNamesInPredicates.get(tableName), candidateIndexIdToMeta);
        checkGrouping(columnNamesInGrouping.get(tableName), candidateIndexIdToMeta);
        checkAggregationFunction(aggregateColumnsInQuery.get(tableName), candidateIndexIdToMeta);
        checkOutputColumns(columnNamesInQueryOutput.get(tableName), candidateIndexIdToMeta);
        if (table.getKeysType() == KeysType.AGG_KEYS && candidateIndexIdToMeta.size() == 0) {
            /**
             * In Doris, it is allowed that the aggregate table should be scanned directly
             * while there is no aggregation info in query.
             * For example:
             * Aggregate tableA: k1, k2, sum(v1)
             * Query: select k1, k2, v1 from tableA
             * Allowed
             * Result: same as select k1, k2, sum(v1) from tableA group by t1, t2
             *
             * However, the query should not be selector normally.
             * The reason is that the level of group by in tableA is upper then the level of group by in query.
             * So, we need to compensate those kinds of index in following step.
             */
            compensateCandidateIndex(candidateIndexIdToMeta, scanNode.getOlapTable().getVisibleIndexIdToMeta(),
                    table);
            // Re-apply the output-column check to the compensated candidates.
            checkOutputColumns(columnNamesInQueryOutput.get(tableName), candidateIndexIdToMeta);
        }
        Map<Long, List<Column>> result = Maps.newHashMap();
        for (Map.Entry<Long, MaterializedIndexMeta> entry : candidateIndexIdToMeta.entrySet()) {
            result.put(entry.getKey(), entry.getValue().getSchema());
        }
        return result;
    }

    // Phase 2: rank candidates — best prefix-index match first, then smallest row count.
    private long priorities(OlapScanNode scanNode, Map<Long, List<Column>> candidateIndexIdToSchema) {
        final Set<String> equivalenceColumns = Sets.newHashSet();
        final Set<String> unequivalenceColumns = Sets.newHashSet();
        scanNode.collectColumns(analyzer, equivalenceColumns, unequivalenceColumns);
        Set<Long> indexesMatchingBestPrefixIndex =
                matchBestPrefixIndex(candidateIndexIdToSchema, equivalenceColumns, unequivalenceColumns);
        return selectBestRowCountIndex(indexesMatchingBestPrefixIndex, scanNode.getOlapTable(), scanNode
                .getSelectedPartitionIds());
    }

    // Counts how many leading schema columns are bound by conjuncts; an equality-bound
    // column extends the prefix, a range-bound ("unequivalence") column ends it.
    // Returns every candidate tied for the longest prefix match.
    private Set<Long> matchBestPrefixIndex(Map<Long, List<Column>> candidateIndexIdToSchema,
                                           Set<String> equivalenceColumns,
                                           Set<String> unequivalenceColumns) {
        if (equivalenceColumns.size() == 0 && unequivalenceColumns.size() == 0) {
            // No bound columns: every candidate is equally good.
            return candidateIndexIdToSchema.keySet();
        }
        Set<Long> indexesMatchingBestPrefixIndex = Sets.newHashSet();
        int maxPrefixMatchCount = 0;
        for (Map.Entry<Long, List<Column>> entry : candidateIndexIdToSchema.entrySet()) {
            int prefixMatchCount = 0;
            long indexId = entry.getKey();
            List<Column> indexSchema = entry.getValue();
            for (Column col : indexSchema) {
                if (equivalenceColumns.contains(col.getName())) {
                    prefixMatchCount++;
                } else if (unequivalenceColumns.contains(col.getName())) {
                    // A range predicate can use this column but stops the prefix.
                    prefixMatchCount++;
                    break;
                } else {
                    break;
                }
            }
            if (prefixMatchCount == maxPrefixMatchCount) {
                LOG.debug("find a equal prefix match index {}. match count: {}", indexId, prefixMatchCount);
                indexesMatchingBestPrefixIndex.add(indexId);
            } else if (prefixMatchCount > maxPrefixMatchCount) {
                LOG.debug("find a better prefix match index {}. match count: {}", indexId, prefixMatchCount);
                maxPrefixMatchCount = prefixMatchCount;
                indexesMatchingBestPrefixIndex.clear();
                indexesMatchingBestPrefixIndex.add(indexId);
            }
        }
        LOG.debug("Those mv match the best prefix index:" + Joiner.on(",").join(indexesMatchingBestPrefixIndex));
        return indexesMatchingBestPrefixIndex;
    }

    // Among the prefix-tied candidates, picks the index with the fewest rows over the
    // selected partitions; row-count ties break toward the narrower schema. A selected
    // "v2 rollup" shadow index is swapped for the base index at the end.
    private long selectBestRowCountIndex(Set<Long> indexesMatchingBestPrefixIndex, OlapTable olapTable,
                                         Collection<Long> partitionIds) {
        long minRowCount = Long.MAX_VALUE;
        long selectedIndexId = 0;
        for (Long indexId : indexesMatchingBestPrefixIndex) {
            long rowCount = 0;
            for (Long partitionId : partitionIds) {
                rowCount += olapTable.getPartition(partitionId).getIndex(indexId).getRowCount();
            }
            LOG.debug("rowCount={} for table={}", rowCount, indexId);
            if (rowCount < minRowCount) {
                minRowCount = rowCount;
                selectedIndexId = indexId;
            } else if (rowCount == minRowCount) {
                // Equal row counts: prefer the index with fewer columns.
                int selectedColumnSize = olapTable.getSchemaByIndexId(selectedIndexId).size();
                int currColumnSize = olapTable.getSchemaByIndexId(indexId).size();
                if (currColumnSize < selectedColumnSize) {
                    selectedIndexId = indexId;
                }
            }
        }
        String tableName = olapTable.getName();
        String v2RollupIndexName = MaterializedViewHandler.NEW_STORAGE_FORMAT_INDEX_NAME_PREFIX + tableName;
        Long v2RollupIndexId = olapTable.getIndexIdByName(v2RollupIndexName);
        long baseIndexId = olapTable.getBaseIndexId();
        if (v2RollupIndexId != null && v2RollupIndexId == selectedIndexId) {
            // Never return the new-storage-format shadow rollup here; fall back to base.
            selectedIndexId = baseIndexId;
        }
        return selectedIndexId;
    }

    // Drops every candidate whose non-aggregated (key) columns do not cover all columns
    // referenced by WHERE/ON predicates. No-op when the query has no such columns.
    private void checkCompensatingPredicates(Set<String> columnsInPredicates,
                                             Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) {
        if (columnsInPredicates == null) {
            return;
        }
        Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next();
            Set<String> indexNonAggregatedColumnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
            entry.getValue().getSchema().stream().filter(column -> !column.isAggregated())
                    .forEach(column -> indexNonAggregatedColumnNames.add(column.getName()));
            if (!indexNonAggregatedColumnNames.containsAll(columnsInPredicates)) {
                iterator.remove();
            }
        }
        LOG.debug("Those mv pass the test of compensating predicates:"
                + Joiner.on(",").join(candidateIndexIdToMeta.keySet()));
    }

    /**
     * View   Query        result
     * SPJ    SPJG OR SPJ  pass
     * SPJG   SPJ          fail
     * SPJG   SPJG         pass
     * 1. grouping columns in query is subset of grouping columns in view
     * 2. the empty grouping columns in query is subset of all of views
     *
     * @param columnsInGrouping       grouping columns of the query for this table (may be null)
     * @param candidateIndexIdToMeta  candidates, pruned in place
     */
    private void checkGrouping(Set<String> columnsInGrouping,
                               Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) {
        Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next();
            Set<String> indexNonAggregatedColumnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
            MaterializedIndexMeta candidateIndexMeta = entry.getValue();
            List<Column> candidateIndexSchema = candidateIndexMeta.getSchema();
            candidateIndexSchema.stream().filter(column -> !column.isAggregated())
                    .forEach(column -> indexNonAggregatedColumnNames.add(column.getName()));
            /*
             If there is no aggregated column in duplicate index, the index will be SPJ.
             For example:
                 duplicate table (k1, k2, v1)
                 duplicate mv index (k1, v1)
             When the candidate index is SPJ type, it passes the verification directly

             If there is no aggregated column in aggregate index, the index will be deduplicate index.
             For example:
                 duplicate table (k1, k2, v1 sum)
                 aggregate mv index (k1, k2)
             This kind of index is SPJG which same as select k1, k2 from aggregate_table group by k1, k2.
             It also need to check the grouping column using following steps.
             ISSUE-3016, MaterializedViewFunctionTest: testDeduplicateQueryInAgg
             */
            if (indexNonAggregatedColumnNames.size() == candidateIndexSchema.size()
                    && candidateIndexMeta.getKeysType() == KeysType.DUP_KEYS) {
                continue;
            }
            // Candidate is SPJG from here on: an SPJ query or a disabled SPJG view cannot use it.
            if (isSPJQuery || disableSPJGView) {
                iterator.remove();
                continue;
            }
            if (columnsInGrouping == null) {
                continue;
            }
            if (!indexNonAggregatedColumnNames.containsAll(columnsInGrouping)) {
                iterator.remove();
            }
        }
        LOG.debug("Those mv pass the test of grouping:"
                + Joiner.on(",").join(candidateIndexIdToMeta.keySet()));
    }

    // Drops SPJG candidates whose aggregated columns do not cover every aggregate
    // (column, function) pair used by the query for this table.
    private void checkAggregationFunction(Set<AggregatedColumn> aggregatedColumnsInQueryOutput,
                                          Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) {
        Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next();
            List<AggregatedColumn> indexAggregatedColumns = Lists.newArrayList();
            List<Column> candidateIndexSchema = entry.getValue().getSchema();
            candidateIndexSchema.stream().filter(column -> column.isAggregated())
                    .forEach(column -> indexAggregatedColumns.add(
                            new AggregatedColumn(column.getName(), column.getAggregationType().name())));
            // No aggregated columns: candidate is SPJ, always acceptable here.
            if (indexAggregatedColumns.size() == 0) {
                continue;
            }
            if (isSPJQuery || disableSPJGView) {
                iterator.remove();
                continue;
            }
            /* Situation1: The query is deduplicate SPJG when aggregatedColumnsInQueryOutput is null.
             * For example: select a , b from table group by a, b
             * The aggregation function check should be pass directly when MV is SPJG.
             */
            if (aggregatedColumnsInQueryOutput == null) {
                continue;
            }
            if (!indexAggregatedColumns.containsAll(aggregatedColumnsInQueryOutput)) {
                iterator.remove();
            }
        }
        LOG.debug("Those mv pass the test of aggregation function:"
                + Joiner.on(",").join(candidateIndexIdToMeta.keySet()));
    }

    // Drops candidates whose full schema does not contain every column the query outputs.
    private void checkOutputColumns(Set<String> columnNamesInQueryOutput,
                                    Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) {
        if (columnNamesInQueryOutput == null) {
            return;
        }
        Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next();
            Set<String> indexColumnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
            List<Column> candidateIndexSchema = entry.getValue().getSchema();
            candidateIndexSchema.stream().forEach(column -> indexColumnNames.add(column.getName()));
            if (!indexColumnNames.containsAll(columnNamesInQueryOutput)) {
                iterator.remove();
            }
        }
        LOG.debug("Those mv pass the test of output columns:"
                + Joiner.on(",").join(candidateIndexIdToMeta.keySet()));
    }

    // Re-adds every visible index whose key-column count equals the base index's,
    // disabling pre-aggregation (see the comment in predicates()).
    private void compensateCandidateIndex(Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta,
                                          Map<Long, MaterializedIndexMeta> allVisibleIndexes, OlapTable table) {
        isPreAggregation = false;
        reasonOfDisable = "The aggregate operator does not match";
        int keySizeOfBaseIndex = table.getKeyColumnsByIndexId(table.getBaseIndexId()).size();
        for (Map.Entry<Long, MaterializedIndexMeta> index : allVisibleIndexes.entrySet()) {
            long mvIndexId = index.getKey();
            if (table.getKeyColumnsByIndexId(mvIndexId).size() == keySizeOfBaseIndex) {
                candidateIndexIdToMeta.put(mvIndexId, index.getValue());
            }
        }
        // NOTE(review): this message says "output columns" — looks copy-pasted from
        // checkOutputColumns; it probably should mention index compensation instead.
        LOG.debug("Those mv pass the test of output columns:"
                + Joiner.on(",").join(candidateIndexIdToMeta.keySet()));
    }

    // Walks the analyzed statement once, populating the per-table column maps and the
    // isSPJQuery / disableSPJGView flags that drive the candidate checks above.
    private void init() {
        Expr whereClause = selectStmt.getWhereClause();
        if (whereClause != null) {
            whereClause.getTableNameToColumnNames(columnNamesInPredicates);
        }
        for (TableRef tableRef : selectStmt.getTableRefs()) {
            if (tableRef.getOnClause() == null) {
                continue;
            }
            tableRef.getOnClause().getTableNameToColumnNames(columnNamesInPredicates);
        }
        if (selectStmt.getAggInfo() == null) {
            isSPJQuery = true;
        } else {
            if (selectStmt.getAggInfo().getGroupingExprs() != null) {
                List<Expr> groupingExprs = selectStmt.getAggInfo().getGroupingExprs();
                for (Expr expr : groupingExprs) {
                    expr.getTableNameToColumnNames(columnNamesInGrouping);
                }
            }
            for (FunctionCallExpr aggExpr : selectStmt.getAggInfo().getAggregateExprs()) {
                // Only single-argument aggregates can be matched to an MV column.
                if (aggExpr.getChildren().size() != 1) {
                    reasonOfDisable = "aggExpr has more than one child";
                    disableSPJGView = true;
                    break;
                }
                Expr aggChild0 = aggExpr.getChild(0);
                if (aggChild0 instanceof SlotRef) {
                    SlotRef slotRef = (SlotRef) aggChild0;
                    Table table = slotRef.getDesc().getParent().getTable();
                    /* If this column come from subquery, the parent table will be null
                     * For example: select k1 from (select name as k1 from tableA) A
                     * The parent table of k1 column in outer query is null.
                     */
                    if (table == null) {
                        continue;
                    }
                    Preconditions.checkState(slotRef.getColumnName() != null);
                    addAggregatedColumn(slotRef.getColumnName(), aggExpr.getFnName().getFunction(),
                            table.getName());
                } else if ((aggChild0 instanceof CastExpr) && (aggChild0.getChild(0) instanceof SlotRef)) {
                    SlotRef slotRef = (SlotRef) aggChild0.getChild(0);
                    Table table = slotRef.getDesc().getParent().getTable();
                    /*
                     * Same as above
                     */
                    if (table == null) {
                        continue;
                    }
                    Preconditions.checkState(slotRef.getColumnName() != null);
                    addAggregatedColumn(slotRef.getColumnName(), aggExpr.getFnName().getFunction(),
                            table.getName());
                } else {
                    reasonOfDisable = "aggExpr.getChild(0)[" + aggExpr.getChild(0).debugString()
                            + "] is not SlotRef or CastExpr|CaseExpr";
                    disableSPJGView = true;
                    break;
                }
            }
        }
        ArrayList<TupleId> topTupleIds = Lists.newArrayList();
        selectStmt.getMaterializedTupleIds(topTupleIds);
        for (TupleId tupleId : topTupleIds) {
            TupleDescriptor tupleDescriptor = analyzer.getTupleDesc(tupleId);
            tupleDescriptor.getTableNameToColumnNames(columnNamesInQueryOutput);
        }
    }

    // Records one (column, aggregate-function) pair under its table name.
    private void addAggregatedColumn(String columnName, String functionName, String tableName) {
        AggregatedColumn newAggregatedColumn = new AggregatedColumn(columnName, functionName);
        Set<AggregatedColumn> aggregatedColumns = aggregateColumnsInQuery.computeIfAbsent(tableName,
                k -> Sets.newHashSet());
        aggregatedColumns.add(newAggregatedColumn);
    }

    // Value object: an aggregated column paired with its aggregate function name;
    // equality is case-insensitive on both parts.
    // NOTE(review): hashCode uses case-sensitive Objects.hash while equals is
    // case-insensitive — equal objects may hash differently; confirm intended.
    class AggregatedColumn {
        private String columnName;
        private String aggFunctionName;

        public AggregatedColumn(String columnName, String aggFunctionName) {
            this.columnName = columnName;
            this.aggFunctionName = aggFunctionName;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            }
            if (!(obj instanceof AggregatedColumn)) {
                return false;
            }
            AggregatedColumn input = (AggregatedColumn) obj;
            return this.columnName.equalsIgnoreCase(input.columnName)
                    && this.aggFunctionName.equalsIgnoreCase(input.aggFunctionName);
        }

        @Override
        public int hashCode() {
            return Objects.hash(this.columnName, this.aggFunctionName);
        }
    }

    // Result holder: the chosen index id, whether pre-aggregation is on, and (when
    // it is off) the reason it was disabled.
    public class BestIndexInfo {
        private long bestIndexId;
        private boolean isPreAggregation;
        private String reasonOfDisable;

        public BestIndexInfo(long bestIndexId, boolean isPreAggregation, String reasonOfDisable) {
            this.bestIndexId = bestIndexId;
            this.isPreAggregation = isPreAggregation;
            this.reasonOfDisable = reasonOfDisable;
        }

        public long getBestIndexId() {
            return bestIndexId;
        }

        public boolean isPreAggregation() {
            return isPreAggregation;
        }

        public String getReasonOfDisable() {
            return reasonOfDisable;
        }
    }
}
class MaterializedViewSelector { private static final Logger LOG = LogManager.getLogger(MaterializedViewSelector.class); private final SelectStmt selectStmt; private final Analyzer analyzer; /** * The key of following maps is table name. * The value of following maps is column names. * `columnNamesInPredicates` means that the column names in where clause. * And so on. */ private Map<String, Set<String>> columnNamesInPredicates = Maps.newHashMap(); private boolean isSPJQuery; private Map<String, Set<String>> columnNamesInGrouping = Maps.newHashMap(); private Map<String, Set<AggregatedColumn>> aggregateColumnsInQuery = Maps.newHashMap(); private Map<String, Set<String>> columnNamesInQueryOutput = Maps.newHashMap(); private boolean disableSPJGView; private String reasonOfDisable; private boolean isPreAggregation = true; public MaterializedViewSelector(SelectStmt selectStmt, Analyzer analyzer) { this.selectStmt = selectStmt; this.analyzer = analyzer; init(); } /** * There are two stages to choosing the best MV. * Phase 1: Predicates = computeCandidateMVs * According to aggregation and column information in the select stmt, * the candidate MVs that meets the query conditions are selected. * Phase 2: Priorities = computeBestMVByCost * According to prefix index and row count in candidate MVs, * the best MV is selected. 
* * @param scanNode * @return */ private Map<Long, List<Column>> predicates(OlapScanNode scanNode) { Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta = scanNode.getOlapTable().getVisibleIndexIdToMeta(); OlapTable table = scanNode.getOlapTable(); Preconditions.checkState(table != null); String tableName = table.getName(); checkCompensatingPredicates(columnNamesInPredicates.get(tableName), candidateIndexIdToMeta); checkGrouping(columnNamesInGrouping.get(tableName), candidateIndexIdToMeta); checkAggregationFunction(aggregateColumnsInQuery.get(tableName), candidateIndexIdToMeta); checkOutputColumns(columnNamesInQueryOutput.get(tableName), candidateIndexIdToMeta); if (table.getKeysType() == KeysType.AGG_KEYS && candidateIndexIdToMeta.size() == 0) { /** * In Doris, it is allowed that the aggregate table should be scanned directly * while there is no aggregation info in query. * For example: * Aggregate tableA: k1, k2, sum(v1) * Query: select k1, k2, v1 from tableA * Allowed * Result: same as select k1, k2, sum(v1) from tableA group by t1, t2 * * However, the query should not be selector normally. * The reason is that the level of group by in tableA is upper then the level of group by in query. * So, we need to compensate those kinds of index in following step. 
* */ compensateCandidateIndex(candidateIndexIdToMeta, scanNode.getOlapTable().getVisibleIndexIdToMeta(), table); checkOutputColumns(columnNamesInQueryOutput.get(tableName), candidateIndexIdToMeta); } Map<Long, List<Column>> result = Maps.newHashMap(); for (Map.Entry<Long, MaterializedIndexMeta> entry : candidateIndexIdToMeta.entrySet()) { result.put(entry.getKey(), entry.getValue().getSchema()); } return result; } private long priorities(OlapScanNode scanNode, Map<Long, List<Column>> candidateIndexIdToSchema) { OlapTable tbl = scanNode.getOlapTable(); Long v2RollupIndexId = tbl.getSegmentV2FormatIndexId(); if (v2RollupIndexId != null) { ConnectContext connectContext = ConnectContext.get(); if (connectContext != null && connectContext.getSessionVariable().isUseV2Rollup()) { if (candidateIndexIdToSchema.containsKey(v2RollupIndexId)) { return v2RollupIndexId; } } else { candidateIndexIdToSchema.remove(v2RollupIndexId); } } final Set<String> equivalenceColumns = Sets.newHashSet(); final Set<String> unequivalenceColumns = Sets.newHashSet(); scanNode.collectColumns(analyzer, equivalenceColumns, unequivalenceColumns); Set<Long> indexesMatchingBestPrefixIndex = matchBestPrefixIndex(candidateIndexIdToSchema, equivalenceColumns, unequivalenceColumns); return selectBestRowCountIndex(indexesMatchingBestPrefixIndex, scanNode.getOlapTable(), scanNode .getSelectedPartitionIds()); } private Set<Long> matchBestPrefixIndex(Map<Long, List<Column>> candidateIndexIdToSchema, Set<String> equivalenceColumns, Set<String> unequivalenceColumns) { if (equivalenceColumns.size() == 0 && unequivalenceColumns.size() == 0) { return candidateIndexIdToSchema.keySet(); } Set<Long> indexesMatchingBestPrefixIndex = Sets.newHashSet(); int maxPrefixMatchCount = 0; for (Map.Entry<Long, List<Column>> entry : candidateIndexIdToSchema.entrySet()) { int prefixMatchCount = 0; long indexId = entry.getKey(); List<Column> indexSchema = entry.getValue(); for (Column col : indexSchema) { if 
(equivalenceColumns.contains(col.getName())) { prefixMatchCount++; } else if (unequivalenceColumns.contains(col.getName())) { prefixMatchCount++; break; } else { break; } } if (prefixMatchCount == maxPrefixMatchCount) { LOG.debug("find a equal prefix match index {}. match count: {}", indexId, prefixMatchCount); indexesMatchingBestPrefixIndex.add(indexId); } else if (prefixMatchCount > maxPrefixMatchCount) { LOG.debug("find a better prefix match index {}. match count: {}", indexId, prefixMatchCount); maxPrefixMatchCount = prefixMatchCount; indexesMatchingBestPrefixIndex.clear(); indexesMatchingBestPrefixIndex.add(indexId); } } LOG.debug("Those mv match the best prefix index:" + Joiner.on(",").join(indexesMatchingBestPrefixIndex)); return indexesMatchingBestPrefixIndex; } private long selectBestRowCountIndex(Set<Long> indexesMatchingBestPrefixIndex, OlapTable olapTable, Collection<Long> partitionIds) { long minRowCount = Long.MAX_VALUE; long selectedIndexId = 0; for (Long indexId : indexesMatchingBestPrefixIndex) { long rowCount = 0; for (Long partitionId : partitionIds) { rowCount += olapTable.getPartition(partitionId).getIndex(indexId).getRowCount(); } LOG.debug("rowCount={} for table={}", rowCount, indexId); if (rowCount < minRowCount) { minRowCount = rowCount; selectedIndexId = indexId; } else if (rowCount == minRowCount) { int selectedColumnSize = olapTable.getSchemaByIndexId(selectedIndexId).size(); int currColumnSize = olapTable.getSchemaByIndexId(indexId).size(); if (currColumnSize < selectedColumnSize) { selectedIndexId = indexId; } } } return selectedIndexId; } private void checkCompensatingPredicates(Set<String> columnsInPredicates, Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) { if (columnsInPredicates == null) { return; } Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next(); Set<String> 
indexNonAggregatedColumnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); entry.getValue().getSchema().stream().filter(column -> !column.isAggregated()) .forEach(column -> indexNonAggregatedColumnNames.add(column.getName())); if (!indexNonAggregatedColumnNames.containsAll(columnsInPredicates)) { iterator.remove(); } } LOG.debug("Those mv pass the test of compensating predicates:" + Joiner.on(",").join(candidateIndexIdToMeta.keySet())); } /** * View Query result * SPJ SPJG OR SPJ pass * SPJG SPJ fail * SPJG SPJG pass * 1. grouping columns in query is subset of grouping columns in view * 2. the empty grouping columns in query is subset of all of views * * @param columnsInGrouping * @param candidateIndexIdToMeta */ private void checkGrouping(Set<String> columnsInGrouping, Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) { Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next(); Set<String> indexNonAggregatedColumnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); MaterializedIndexMeta candidateIndexMeta = entry.getValue(); List<Column> candidateIndexSchema = candidateIndexMeta.getSchema(); candidateIndexSchema.stream().filter(column -> !column.isAggregated()) .forEach(column -> indexNonAggregatedColumnNames.add(column.getName())); /* If there is no aggregated column in duplicate index, the index will be SPJ. For example: duplicate table (k1, k2, v1) duplicate mv index (k1, v1) When the candidate index is SPJ type, it passes the verification directly If there is no aggregated column in aggregate index, the index will be deduplicate index. For example: duplicate table (k1, k2, v1 sum) aggregate mv index (k1, k2) This kind of index is SPJG which same as select k1, k2 from aggregate_table group by k1, k2. It also need to check the grouping column using following steps. 
ISSUE-3016, MaterializedViewFunctionTest: testDeduplicateQueryInAgg */ if (indexNonAggregatedColumnNames.size() == candidateIndexSchema.size() && candidateIndexMeta.getKeysType() == KeysType.DUP_KEYS) { continue; } if (isSPJQuery || disableSPJGView) { iterator.remove(); continue; } if (columnsInGrouping == null) { continue; } if (!indexNonAggregatedColumnNames.containsAll(columnsInGrouping)) { iterator.remove(); } } LOG.debug("Those mv pass the test of grouping:" + Joiner.on(",").join(candidateIndexIdToMeta.keySet())); } private void checkAggregationFunction(Set<AggregatedColumn> aggregatedColumnsInQueryOutput, Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) { Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next(); List<AggregatedColumn> indexAggregatedColumns = Lists.newArrayList(); List<Column> candidateIndexSchema = entry.getValue().getSchema(); candidateIndexSchema.stream().filter(column -> column.isAggregated()) .forEach(column -> indexAggregatedColumns.add( new AggregatedColumn(column.getName(), column.getAggregationType().name()))); if (indexAggregatedColumns.size() == 0) { continue; } if (isSPJQuery || disableSPJGView) { iterator.remove(); continue; } /* Situation1: The query is deduplicate SPJG when aggregatedColumnsInQueryOutput is null. * For example: select a , b from table group by a, b * The aggregation function check should be pass directly when MV is SPJG. 
*/ if (aggregatedColumnsInQueryOutput == null) { continue; } if (!indexAggregatedColumns.containsAll(aggregatedColumnsInQueryOutput)) { iterator.remove(); } } LOG.debug("Those mv pass the test of aggregation function:" + Joiner.on(",").join(candidateIndexIdToMeta.keySet())); } private void checkOutputColumns(Set<String> columnNamesInQueryOutput, Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta) { if (columnNamesInQueryOutput == null) { return; } Iterator<Map.Entry<Long, MaterializedIndexMeta>> iterator = candidateIndexIdToMeta.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Long, MaterializedIndexMeta> entry = iterator.next(); Set<String> indexColumnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); List<Column> candidateIndexSchema = entry.getValue().getSchema(); candidateIndexSchema.stream().forEach(column -> indexColumnNames.add(column.getName())); if (!indexColumnNames.containsAll(columnNamesInQueryOutput)) { iterator.remove(); } } LOG.debug("Those mv pass the test of output columns:" + Joiner.on(",").join(candidateIndexIdToMeta.keySet())); } private void compensateCandidateIndex(Map<Long, MaterializedIndexMeta> candidateIndexIdToMeta, Map<Long, MaterializedIndexMeta> allVisibleIndexes, OlapTable table) { isPreAggregation = false; reasonOfDisable = "The aggregate operator does not match"; int keySizeOfBaseIndex = table.getKeyColumnsByIndexId(table.getBaseIndexId()).size(); for (Map.Entry<Long, MaterializedIndexMeta> index : allVisibleIndexes.entrySet()) { long mvIndexId = index.getKey(); if (table.getKeyColumnsByIndexId(mvIndexId).size() == keySizeOfBaseIndex) { candidateIndexIdToMeta.put(mvIndexId, index.getValue()); } } LOG.debug("Those mv pass the test of output columns:" + Joiner.on(",").join(candidateIndexIdToMeta.keySet())); } private void init() { Expr whereClause = selectStmt.getWhereClause(); if (whereClause != null) { whereClause.getTableNameToColumnNames(columnNamesInPredicates); } for (TableRef tableRef : 
selectStmt.getTableRefs()) { if (tableRef.getOnClause() == null) { continue; } tableRef.getOnClause().getTableNameToColumnNames(columnNamesInPredicates); } if (selectStmt.getAggInfo() == null) { isSPJQuery = true; } else { if (selectStmt.getAggInfo().getGroupingExprs() != null) { List<Expr> groupingExprs = selectStmt.getAggInfo().getGroupingExprs(); for (Expr expr : groupingExprs) { expr.getTableNameToColumnNames(columnNamesInGrouping); } } for (FunctionCallExpr aggExpr : selectStmt.getAggInfo().getAggregateExprs()) { if (aggExpr.getChildren().size() != 1) { reasonOfDisable = "aggExpr has more than one child"; disableSPJGView = true; break; } Expr aggChild0 = aggExpr.getChild(0); if (aggChild0 instanceof SlotRef) { SlotRef slotRef = (SlotRef) aggChild0; Table table = slotRef.getDesc().getParent().getTable(); /* If this column come from subquery, the parent table will be null * For example: select k1 from (select name as k1 from tableA) A * The parent table of k1 column in outer query is null. 
*/ if (table == null) { continue; } Preconditions.checkState(slotRef.getColumnName() != null); addAggregatedColumn(slotRef.getColumnName(), aggExpr.getFnName().getFunction(), table.getName()); } else if ((aggChild0 instanceof CastExpr) && (aggChild0.getChild(0) instanceof SlotRef)) { SlotRef slotRef = (SlotRef) aggChild0.getChild(0); Table table = slotRef.getDesc().getParent().getTable(); /* * Same as above */ if (table == null) { continue; } Preconditions.checkState(slotRef.getColumnName() != null); addAggregatedColumn(slotRef.getColumnName(), aggExpr.getFnName().getFunction(), table.getName()); } else { reasonOfDisable = "aggExpr.getChild(0)[" + aggExpr.getChild(0).debugString() + "] is not SlotRef or CastExpr|CaseExpr"; disableSPJGView = true; break; } } } ArrayList<TupleId> topTupleIds = Lists.newArrayList(); selectStmt.getMaterializedTupleIds(topTupleIds); for (TupleId tupleId : topTupleIds) { TupleDescriptor tupleDescriptor = analyzer.getTupleDesc(tupleId); tupleDescriptor.getTableNameToColumnNames(columnNamesInQueryOutput); } } private void addAggregatedColumn(String columnName, String functionName, String tableName) { AggregatedColumn newAggregatedColumn = new AggregatedColumn(columnName, functionName); Set<AggregatedColumn> aggregatedColumns = aggregateColumnsInQuery.computeIfAbsent(tableName, k -> Sets.newHashSet()); aggregatedColumns.add(newAggregatedColumn); } class AggregatedColumn { private String columnName; private String aggFunctionName; public AggregatedColumn(String columnName, String aggFunctionName) { this.columnName = columnName; this.aggFunctionName = aggFunctionName; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof AggregatedColumn)) { return false; } AggregatedColumn input = (AggregatedColumn) obj; return this.columnName.equalsIgnoreCase(input.columnName) && this.aggFunctionName.equalsIgnoreCase(input.aggFunctionName); } @Override public int hashCode() { return 
Objects.hash(this.columnName, this.aggFunctionName); } } public class BestIndexInfo { private long bestIndexId; private boolean isPreAggregation; private String reasonOfDisable; public BestIndexInfo(long bestIndexId, boolean isPreAggregation, String reasonOfDisable) { this.bestIndexId = bestIndexId; this.isPreAggregation = isPreAggregation; this.reasonOfDisable = reasonOfDisable; } public long getBestIndexId() { return bestIndexId; } public boolean isPreAggregation() { return isPreAggregation; } public String getReasonOfDisable() { return reasonOfDisable; } } }
Need to update the doc comment with the new grammar. Applicable for all places.
private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); STNode onFailClause; if (peek().kind == SyntaxKind.ON_KEYWORD) { onFailClause = parseOnFailClause(); } else { onFailClause = STNodeFactory.createEmptyNode(); } return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody, onFailClause); }
if (peek().kind == SyntaxKind.ON_KEYWORD) {
private STNode parseWhileStatement() { startContext(ParserRuleContext.WHILE_BLOCK); STNode whileKeyword = parseWhileKeyword(); STNode condition = parseExpression(); STNode whileBody = parseBlockNode(); endContext(); STNode onFailClause = parseOptionalOnFailClause(); return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody, onFailClause); }
class BallerinaParser extends AbstractParser { private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.DEFAULT; protected BallerinaParser(AbstractTokenReader tokenReader) { super(tokenReader, new BallerinaParserErrorHandler(tokenReader)); } /** * Start parsing the given input. * * @return Parsed node */ @Override public STNode parse() { return parseCompUnit(); } /** * Start parsing the input from a given context. Supported starting points are: * <ul> * <li>Module part (a file)</li> * <li>Top level node</li> * <li>Statement</li> * <li>Expression</li> * </ul> * * @param context Context to start parsing * @return Parsed node */ public STNode parse(ParserRuleContext context) { switch (context) { case COMP_UNIT: return parseCompUnit(); case TOP_LEVEL_NODE: startContext(ParserRuleContext.COMP_UNIT); return parseTopLevelNode(); case STATEMENT: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_BODY_BLOCK); return parseStatement(); case EXPRESSION: startContext(ParserRuleContext.COMP_UNIT); startContext(ParserRuleContext.FUNC_BODY_BLOCK); startContext(ParserRuleContext.STATEMENT); return parseExpression(); default: throw new UnsupportedOperationException("Cannot start parsing from: " + context); } } /* * Private methods. */ /** * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit. 
* * @return Parsed node */ private STNode parseCompUnit() { startContext(ParserRuleContext.COMP_UNIT); List<STNode> otherDecls = new ArrayList<>(); List<STNode> importDecls = new ArrayList<>(); boolean processImports = true; STToken token = peek(); while (token.kind != SyntaxKind.EOF_TOKEN) { STNode decl = parseTopLevelNode(); if (decl == null) { break; } if (decl.kind == SyntaxKind.IMPORT_DECLARATION) { if (processImports) { importDecls.add(decl); } else { updateLastNodeInListWithInvalidNode(otherDecls, decl, DiagnosticErrorCode.ERROR_IMPORT_DECLARATION_AFTER_OTHER_DECLARATIONS); } } else { if (processImports) { processImports = false; } otherDecls.add(decl); } token = peek(); } STToken eof = consume(); endContext(); return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls), STNodeFactory.createNodeList(otherDecls), eof); } /** * Parse top level node having an optional modifier preceding it. * * @return Parsed node */ protected STNode parseTopLevelNode() { STToken nextToken = peek(); STNode metadata; switch (nextToken.kind) { case EOF_TOKEN: return null; case DOCUMENTATION_STRING: case AT_TOKEN: metadata = parseMetaData(); return parseTopLevelNode(metadata); case IMPORT_KEYWORD: case FINAL_KEYWORD: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case SERVICE_KEYWORD: case ENUM_KEYWORD: case TRANSACTIONAL_KEYWORD: metadata = STNodeFactory.createEmptyNode(); break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(STNodeFactory.createEmptyNode(), null); } default: if (isTypeStartingToken(nextToken.kind) && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { metadata = STNodeFactory.createEmptyNode(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE); if (solution.action == Action.KEEP) { metadata = STNodeFactory.createEmptyNodeList(); break; } return 
parseTopLevelNode(); } return parseTopLevelNode(metadata); } /** * Parse top level node having an optional modifier preceding it, given the next token kind. * * @param metadata Next token kind * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata) { STToken nextToken = peek(); STNode qualifier = null; switch (nextToken.kind) { case EOF_TOKEN: if (metadata != null) { addInvalidNodeToNextToken(metadata, DiagnosticErrorCode.ERROR_INVALID_METADATA); } return null; case PUBLIC_KEYWORD: qualifier = parseQualifier(); break; case FUNCTION_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case IMPORT_KEYWORD: case ANNOTATION_KEYWORD: case XMLNS_KEYWORD: case ENUM_KEYWORD: case TRANSACTIONAL_KEYWORD: break; case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return parseModuleVarDecl(metadata, null); } default: if (isTypeStartingToken(nextToken.kind) && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata); if (solution.action == Action.KEEP) { qualifier = STNodeFactory.createEmptyNode(); break; } return parseTopLevelNode(metadata); } return parseTopLevelNode(metadata, qualifier); } /** * Check whether the cursor is at the start of a module level var-decl. * * @param lookahead Offset of the token to to check * @return <code>true</code> if the cursor is at the start of a module level var-decl. * <code>false</code> otherwise. 
*/ private boolean isModuleVarDeclStart(int lookahead) { STToken nextToken = peek(lookahead + 1); switch (nextToken.kind) { case EQUAL_TOKEN: case OPEN_BRACKET_TOKEN: case QUESTION_MARK_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: case EOF_TOKEN: return true; case IDENTIFIER_TOKEN: switch (peek(lookahead + 2).kind) { case EQUAL_TOKEN: case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return false; } case COLON_TOKEN: if (lookahead > 1) { return false; } switch (peek(lookahead + 2).kind) { case IDENTIFIER_TOKEN: return isModuleVarDeclStart(lookahead + 2); case EOF_TOKEN: return true; default: return false; } default: return false; } } /** * Parse import declaration. * <p> * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code> * * @return Parsed node */ private STNode parseImportDecl() { startContext(ParserRuleContext.IMPORT_DECL); this.tokenReader.startMode(ParserMode.IMPORT); STNode importKeyword = parseImportKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME); STNode importDecl = parseImportDecl(importKeyword, identifier); this.tokenReader.endMode(); endContext(); return importDecl; } /** * Parse import keyword. * * @return Parsed node */ private STNode parseImportKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IMPORT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IMPORT_KEYWORD); return parseImportKeyword(); } } /** * Parse identifier. 
* * @return Parsed node */ private STNode parseIdentifier(ParserRuleContext currentCtx) { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else if (token.kind == SyntaxKind.MAP_KEYWORD) { STToken mapKeyword = consume(); return STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(), mapKeyword.trailingMinutiae(), mapKeyword.diagnostics()); } else { recover(token, currentCtx); return parseIdentifier(currentCtx); } } /** * Parse RHS of the import declaration. This includes the components after the * starting identifier (org-name/module-name) of the import decl. * * @param importKeyword Import keyword * @param identifier Org-name or the module name * @return Parsed node */ private STNode parseImportDecl(STNode importKeyword, STNode identifier) { STToken nextToken = peek(); STNode orgName; STNode moduleName; STNode version; STNode alias; switch (nextToken.kind) { case SLASH_TOKEN: STNode slash = parseSlashToken(); orgName = STNodeFactory.createImportOrgNameNode(identifier, slash); moduleName = parseModuleName(); version = parseVersion(); alias = parseImportPrefixDecl(); break; case DOT_TOKEN: case VERSION_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(identifier); version = parseVersion(); alias = parseImportPrefixDecl(); break; case AS_KEYWORD: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(identifier); version = STNodeFactory.createEmptyNode(); alias = parseImportPrefixDecl(); break; case SEMICOLON_TOKEN: orgName = STNodeFactory.createEmptyNode(); moduleName = parseModuleName(identifier); version = STNodeFactory.createEmptyNode(); alias = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier); return parseImportDecl(importKeyword, identifier); } STNode semicolon = parseSemicolon(); return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, 
version, alias, semicolon); } /** * parse slash token. * * @return Parsed node */ private STNode parseSlashToken() { STToken token = peek(); if (token.kind == SyntaxKind.SLASH_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.SLASH); return parseSlashToken(); } } /** * Parse dot token. * * @return Parsed node */ private STNode parseDotToken() { STToken token = peek(); if (token.kind == SyntaxKind.DOT_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.DOT); return parseDotToken(); } } /** * Parse module name of a import declaration. * * @return Parsed node */ private STNode parseModuleName() { STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME); return parseModuleName(moduleNameStart); } /** * Parse import module name of a import declaration, given the module name start identifier. * * @return Parsed node */ private STNode parseModuleName(STNode moduleNameStart) { List<STNode> moduleNameParts = new ArrayList<>(); moduleNameParts.add(moduleNameStart); STToken nextToken = peek(); while (!isEndOfImportModuleName(nextToken)) { moduleNameParts.add(parseDotToken()); moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME)); nextToken = peek(); } return STNodeFactory.createNodeList(moduleNameParts); } private boolean isEndOfImportModuleName(STToken nextToken) { return nextToken.kind != SyntaxKind.DOT_TOKEN && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN; } private boolean isEndOfImportDecl(STToken nextToken) { switch (nextToken.kind) { case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case TYPE_KEYWORD: case ABSTRACT_KEYWORD: case CONST_KEYWORD: case EOF_TOKEN: case SERVICE_KEYWORD: case IMPORT_KEYWORD: case FINAL_KEYWORD: case TRANSACTIONAL_KEYWORD: return true; default: return false; } } /** * Parse version component of a import declaration. 
* <p> * <code>version-decl := version sem-ver</code> * * @return Parsed node */ private STNode parseVersion() { STToken nextToken = peek(); switch (nextToken.kind) { case VERSION_KEYWORD: STNode versionKeyword = parseVersionKeyword(); STNode versionNumber = parseVersionNumber(); return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber); case AS_KEYWORD: case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextToken)) { return STNodeFactory.createEmptyNode(); } recover(peek(), ParserRuleContext.IMPORT_VERSION_DECL); return parseVersion(); } } /** * Parse version keyword. * * @return Parsed node */ private STNode parseVersionKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) { return consume(); } else { recover(peek(), ParserRuleContext.VERSION_KEYWORD); return parseVersionKeyword(); } } /** * Parse version number. * <p> * <code>sem-ver := major-num [. minor-num [. patch-num]] * <br/> * major-num := DecimalNumber * <br/> * minor-num := DecimalNumber * <br/> * patch-num := DecimalNumber * </code> * * @return Parsed node */ private STNode parseVersionNumber() { STToken nextToken = peek(); STNode majorVersion; switch (nextToken.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: majorVersion = parseMajorVersion(); break; default: recover(peek(), ParserRuleContext.VERSION_NUMBER); return parseVersionNumber(); } List<STNode> versionParts = new ArrayList<>(); versionParts.add(majorVersion); STNode minorVersionEnd = parseSubVersionEnd(); if (minorVersionEnd != null) { versionParts.add(minorVersionEnd); STNode minorVersion = parseMinorVersion(); versionParts.add(minorVersion); STNode patchVersionEnd = parseSubVersionEnd(); if (patchVersionEnd != null) { versionParts.add(patchVersionEnd); STNode patchVersion = parsePatchVersion(); versionParts.add(patchVersion); } } return STNodeFactory.createNodeList(versionParts); } private STNode parseMajorVersion() { return 
parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION); } private STNode parseMinorVersion() { return parseDecimalIntLiteral(ParserRuleContext.MINOR_VERSION); } private STNode parsePatchVersion() { return parseDecimalIntLiteral(ParserRuleContext.PATCH_VERSION); } /** * Parse decimal literal. * * @param context Context in which the decimal literal is used. * @return Parsed node */ private STNode parseDecimalIntLiteral(ParserRuleContext context) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) { return consume(); } else { recover(peek(), context); return parseDecimalIntLiteral(context); } } private STNode parseSubVersionEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case AS_KEYWORD: case SEMICOLON_TOKEN: case EOF_TOKEN: return null; case DOT_TOKEN: return parseDotToken(); default: recover(nextToken, ParserRuleContext.IMPORT_SUB_VERSION); return parseSubVersionEnd(); } } /** * Parse import prefix declaration. * <p> * <code>import-prefix-decl := as import-prefix * <br/> * import-prefix := a identifier | _ * </code> * * @return Parsed node */ private STNode parseImportPrefixDecl() { STToken nextToken = peek(); switch (nextToken.kind) { case AS_KEYWORD: STNode asKeyword = parseAsKeyword(); STNode prefix = parseImportPrefix(); return STNodeFactory.createImportPrefixNode(asKeyword, prefix); case SEMICOLON_TOKEN: return STNodeFactory.createEmptyNode(); default: if (isEndOfImportDecl(nextToken)) { return STNodeFactory.createEmptyNode(); } recover(peek(), ParserRuleContext.IMPORT_PREFIX_DECL); return parseImportPrefixDecl(); } } /** * Parse <code>as</code> keyword. * * @return Parsed node */ private STNode parseAsKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.AS_KEYWORD) { return consume(); } else { recover(peek(), ParserRuleContext.AS_KEYWORD); return parseAsKeyword(); } } /** * Parse import prefix. 
* * @return Parsed node */ private STNode parseImportPrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.IMPORT_PREFIX); return parseImportPrefix(); } } /** * Parse top level node, given the modifier that precedes it. * * @param qualifier Qualifier that precedes the top level node * @return Parsed node */ private STNode parseTopLevelNode(STNode metadata, STNode qualifier) { STToken nextToken = peek(); switch (nextToken.kind) { case EOF_TOKEN: reportInvalidQualifier(qualifier); return null; case FUNCTION_KEYWORD: case TRANSACTIONAL_KEYWORD: List<STNode> qualifiers = new ArrayList<>(); if (qualifier != null) { qualifiers.add(qualifier); } return parseFuncDefOrFuncTypeDesc(ParserRuleContext.TOP_LEVEL_FUNC_DEF_OR_FUNC_TYPE_DESC, metadata, false, qualifiers); case TYPE_KEYWORD: return parseModuleTypeDefinition(metadata, getQualifier(qualifier)); case LISTENER_KEYWORD: return parseListenerDeclaration(metadata, getQualifier(qualifier)); case CONST_KEYWORD: return parseConstantDeclaration(metadata, getQualifier(qualifier)); case ANNOTATION_KEYWORD: STNode constKeyword = STNodeFactory.createEmptyNode(); return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword); case IMPORT_KEYWORD: reportInvalidQualifier(qualifier); return parseImportDecl(); case XMLNS_KEYWORD: reportInvalidQualifier(qualifier); return parseXMLNamespaceDeclaration(true); case FINAL_KEYWORD: reportInvalidQualifier(qualifier); STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(metadata, finalKeyword, true); case SERVICE_KEYWORD: if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) { reportInvalidQualifier(qualifier); return parseServiceDecl(metadata); } return parseModuleVarDecl(metadata, qualifier); case ENUM_KEYWORD: return parseEnumDeclaration(metadata, getQualifier(qualifier)); case IDENTIFIER_TOKEN: if (isModuleVarDeclStart(1)) { return 
parseModuleVarDecl(metadata, qualifier); } default: if (isTypeStartingToken(nextToken.kind) && nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) { return parseModuleVarDecl(metadata, qualifier); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier); if (solution.action == Action.KEEP) { return parseModuleVarDecl(metadata, qualifier); } return parseTopLevelNode(metadata, qualifier); } } private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) { reportInvalidQualifier(qualifier); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(metadata, finalKeyword, true); } private STNode getQualifier(STNode qualifier) { return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier; } private void reportInvalidQualifier(STNode qualifier) { if (qualifier != null && qualifier.kind != SyntaxKind.NONE) { addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_INVALID_QUALIFIER, qualifier.toString().trim()); } } /** * Parse access modifiers. * * @return Parsed node */ private STNode parseQualifier() { STToken token = peek(); if (token.kind == SyntaxKind.PUBLIC_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.PUBLIC_KEYWORD); return parseQualifier(); } } private STNode parseFuncDefinition(STNode metadata, boolean isObjectMethod, STNode qualifiers) { startContext(ParserRuleContext.FUNC_DEF); STNode functionKeyword = parseFunctionKeyword(); STNode funcDef = parseFunctionKeywordRhs(metadata, functionKeyword, true, isObjectMethod, qualifiers); return funcDef; } /** * Parse function definition for the function type descriptor. 
* <p> * <code> * function-defn := FUNCTION identifier function-signature function-body * <br/> * function-type-descriptor := function function-signature * </code> * * @param metadata Metadata * @param qualifiers qualifier list * @return Parsed node */ private STNode parseFuncDefOrFuncTypeDesc(ParserRuleContext context, STNode metadata, boolean isObjectMember, List<STNode> qualifiers) { STNode qualifierList = parseFunctionQualifiers(context, qualifiers); return parseFuncDefOrFuncTypeDesc(metadata, isObjectMember, qualifierList); } private STNode parseFuncDefOrFuncTypeDesc(STNode metadata, boolean isObjectMember, STNode qualifiers) { startContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE); STNode functionKeyword = parseFunctionKeyword(); STNode funcDefOrType = parseFunctionKeywordRhs(metadata, functionKeyword, false, isObjectMember, qualifiers); return funcDefOrType; } private STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isFuncDef, boolean isObjectMember, STNode qualifiers) { if (isFuncDef) { STNode name = parseFunctionName(); switchContext(ParserRuleContext.FUNC_DEF); STNode funcSignature = parseFuncSignature(false); STNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMember, name, funcSignature, qualifiers); endContext(); return funcDef; } return parseFunctionKeywordRhs(metadata, functionKeyword, isObjectMember, qualifiers); } private STNode parseFunctionKeywordRhs(STNode metadata, STNode functionKeyword, boolean isObjectMember, STNode qualifiers) { switch (peek().kind) { case IDENTIFIER_TOKEN: STNode name = parseFunctionName(); switchContext(ParserRuleContext.FUNC_DEF); STNode funcSignature = parseFuncSignature(false); STNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMember, name, funcSignature, qualifiers); endContext(); return funcDef; case OPEN_PAREN_TOKEN: funcSignature = parseFuncSignature(true); return parseReturnTypeDescRhs(metadata, functionKeyword, funcSignature, 
isObjectMember, qualifiers); default: STToken token = peek(); recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata, functionKeyword, isObjectMember, qualifiers); return parseFunctionKeywordRhs(metadata, functionKeyword, isObjectMember, qualifiers); } } private STNode createFuncDefOrMethodDecl(STNode metadata, STNode functionKeyword, boolean isObjectMethod, STNode name, STNode funcSignature, STNode qualifierList) { STNode body = parseFunctionBody(isObjectMethod); if (body.kind == SyntaxKind.SEMICOLON_TOKEN) { return STNodeFactory.createMethodDeclarationNode(metadata, qualifierList, functionKeyword, name, funcSignature, body); } if (isObjectMethod) { return STNodeFactory.createFunctionDefinitionNode(SyntaxKind.OBJECT_METHOD_DEFINITION, metadata, qualifierList, functionKeyword, name, funcSignature, body); } return STNodeFactory.createFunctionDefinitionNode(SyntaxKind.FUNCTION_DEFINITION, metadata, qualifierList, functionKeyword, name, funcSignature, body); } /** * Parse function signature. 
* <p> * <code> * function-signature := ( param-list ) return-type-descriptor * <br/> * return-type-descriptor := [ returns [annots] type-descriptor ] * </code> * * @param isParamNameOptional Whether the parameter names are optional * @return Function signature node */ private STNode parseFuncSignature(boolean isParamNameOptional) { STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode parameters = parseParamList(isParamNameOptional); STNode closeParenthesis = parseCloseParenthesis(); endContext(); STNode returnTypeDesc = parseFuncReturnTypeDescriptor(); return STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc); } private STNode parseReturnTypeDescRhs(STNode metadata, STNode functionKeyword, STNode funcSignature, boolean isObjectMember, STNode qualifiers) { STToken nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: case IDENTIFIER_TOKEN: case OPEN_BRACKET_TOKEN: endContext(); STNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature); return parseVarDeclWithFunctionType(typeDesc, isObjectMember, qualifiers, metadata); case OPEN_BRACE_TOKEN: case EQUAL_TOKEN: break; default: if (isValidTypeContinuationToken(nextToken)) { endContext(); typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature); typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TOP_LEVEL_FUNC_DEF_OR_FUNC_TYPE_DESC, false); return parseVarDeclWithFunctionType(typeDesc, isObjectMember, qualifiers, metadata); } break; } STNode name = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_FUNCTION_NAME); funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature); STNode funcDef = createFuncDefOrMethodDecl(metadata, functionKeyword, isObjectMember, name, funcSignature, qualifiers); endContext(); return funcDef; } private STNode 
parseVarDeclWithFunctionType(STNode typeDesc, boolean isObjectMember, STNode qualifiers, STNode metadata) { STNodeList qualifierList = (STNodeList) qualifiers; STNode visibilityQualifier = STNodeFactory.createEmptyNode(); for (int position = 0; position < qualifierList.size(); position++) { STNode qualifier = qualifierList.get(position); if (isObjectMember && isVisibilityQualifier(qualifier)) { visibilityQualifier = qualifier; } else { typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED); } } if (isObjectMember) { STNode readonlyQualifier = STNodeFactory.createEmptyNode(); STNode fieldName = parseVariableName(); return parseObjectFieldRhs(metadata, visibilityQualifier, readonlyQualifier, typeDesc, fieldName); } startContext(ParserRuleContext.VAR_DECL_STMT); STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(metadata, STNodeFactory.createEmptyNode(), typedBindingPattern, true); } private boolean isVisibilityQualifier(STNode qualifier) { switch (qualifier.kind) { case PUBLIC_KEYWORD: case PRIVATE_KEYWORD: return true; default: return false; } } /** * Validate the param list and return. If there are params without param-name, * then this method will create a new set of params with missing param-name * and return. 
* * @param signature Function signature * @return */ private STNode validateAndGetFuncParams(STFunctionSignatureNode signature) { STNode parameters = signature.parameters; int paramCount = parameters.bucketCount(); int index = 0; for (; index < paramCount; index++) { STNode param = parameters.childInBucket(index); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { break; } continue; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { break; } continue; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { break; } continue; default: continue; } break; } if (index == paramCount) { return signature; } STNode updatedParams = getUpdatedParamList(parameters, index); return STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams, signature.closeParenToken, signature.returnTypeDesc); } private STNode getUpdatedParamList(STNode parameters, int index) { int paramCount = parameters.bucketCount(); int newIndex = 0; ArrayList<STNode> newParams = new ArrayList<>(); for (; newIndex < index; newIndex++) { newParams.add(parameters.childInBucket(index)); } for (; newIndex < paramCount; newIndex++) { STNode param = parameters.childInBucket(newIndex); STNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); switch (param.kind) { case REQUIRED_PARAM: STRequiredParameterNode requiredParam = (STRequiredParameterNode) param; if (isEmpty(requiredParam.paramName)) { param = STNodeFactory .createRequiredParameterNode(requiredParam.annotations, requiredParam.typeName, paramName); } break; case DEFAULTABLE_PARAM: STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param; if (isEmpty(defaultableParam.paramName)) { param = STNodeFactory 
.createDefaultableParameterNode(defaultableParam.annotations, defaultableParam.typeName, paramName, defaultableParam.equalsToken, defaultableParam.expression); } break; case REST_PARAM: STRestParameterNode restParam = (STRestParameterNode) param; if (isEmpty(restParam.paramName)) { param = STNodeFactory.createRestParameterNode(restParam.annotations, restParam.typeName, restParam.ellipsisToken, paramName); } break; default: break; } newParams.add(param); } return STNodeFactory.createNodeList(newParams); } private boolean isEmpty(STNode node) { return !SyntaxUtils.isSTNodePresent(node); } /** * Parse function keyword. Need to validate the token before consuming, * since we can reach here while recovering. * * @return Parsed node */ private STNode parseFunctionKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FUNCTION_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FUNCTION_KEYWORD); return parseFunctionKeyword(); } } /** * Parse function name. * * @return Parsed node */ private STNode parseFunctionName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.FUNC_NAME); return parseFunctionName(); } } /** * Parse open parenthesis. * * @param ctx Context of the parenthesis * @return Parsed node */ private STNode parseOpenParenthesis(ParserRuleContext ctx) { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) { return consume(); } else { recover(token, ctx); return parseOpenParenthesis(ctx); } } /** * Parse close parenthesis. * * @return Parsed node */ private STNode parseCloseParenthesis() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSE_PARENTHESIS); return parseCloseParenthesis(); } } /** * <p> * Parse parameter list. 
* </p> * <code> * param-list := required-params [, defaultable-params] [, rest-param] * <br/>&nbsp;| defaultable-params [, rest-param] * <br/>&nbsp;| [rest-param] * <br/><br/> * required-params := required-param (, required-param)* * <br/><br/> * required-param := [annots] [public] type-descriptor [param-name] * <br/><br/> * defaultable-params := defaultable-param (, defaultable-param)* * <br/><br/> * defaultable-param := [annots] [public] type-descriptor [param-name] default-value * <br/><br/> * rest-param := [annots] type-descriptor ... [param-name] * <br/><br/> * param-name := identifier * </code> * * @param isParamNameOptional Whether the param names in the signature is optional or not. * @return Parsed node */ private STNode parseParamList(boolean isParamNameOptional) { startContext(ParserRuleContext.PARAM_LIST); STToken token = peek(); if (isEndOfParametersList(token.kind)) { return STNodeFactory.createEmptyNodeList(); } ArrayList<STNode> paramsList = new ArrayList<>(); startContext(ParserRuleContext.REQUIRED_PARAM); STNode firstParam = parseParameter(SyntaxKind.REQUIRED_PARAM, isParamNameOptional); SyntaxKind prevParamKind = firstParam.kind; paramsList.add(firstParam); boolean paramOrderErrorPresent = false; token = peek(); while (!isEndOfParametersList(token.kind)) { STNode paramEnd = parseParameterRhs(); if (paramEnd == null) { break; } endContext(); if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) { startContext(ParserRuleContext.DEFAULTABLE_PARAM); } else { startContext(ParserRuleContext.REQUIRED_PARAM); } STNode param = parseParameter(prevParamKind, isParamNameOptional); if (paramOrderErrorPresent) { updateLastNodeInListWithInvalidNode(paramsList, paramEnd, null); updateLastNodeInListWithInvalidNode(paramsList, param, null); } else { DiagnosticCode paramOrderError = validateParamOrder(param, prevParamKind); if (paramOrderError == null) { paramsList.add(paramEnd); paramsList.add(param); } else { paramOrderErrorPresent = true; 
updateLastNodeInListWithInvalidNode(paramsList, paramEnd, paramOrderError); updateLastNodeInListWithInvalidNode(paramsList, param, null); } } prevParamKind = param.kind; token = peek(); } endContext(); return STNodeFactory.createNodeList(paramsList); } /** * Return the appropriate {@code DiagnosticCode} if there are parameter order issues. * * @param param the new parameter * @param prevParamKind the SyntaxKind of the previously added parameter */ private DiagnosticCode validateParamOrder(STNode param, SyntaxKind prevParamKind) { if (prevParamKind == SyntaxKind.REST_PARAM) { return DiagnosticErrorCode.ERROR_PARAMETER_AFTER_THE_REST_PARAMETER; } else if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM && param.kind == SyntaxKind.REQUIRED_PARAM) { return DiagnosticErrorCode.ERROR_REQUIRED_PARAMETER_AFTER_THE_DEFAULTABLE_PARAMETER; } else { return null; } } private boolean isNodeWithSyntaxKindInList(List<STNode> nodeList, SyntaxKind kind) { for (STNode node : nodeList) { if (node.kind == kind) { return true; } } return false; } private STNode parseParameterRhs() { return parseParameterRhs(peek().kind); } private STNode parseParameterRhs(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), ParserRuleContext.PARAM_END); return parseParameterRhs(); } } /** * Parse a single parameter. Parameter can be a required parameter, a defaultable * parameter, or a rest parameter. * * @param prevParamKind Kind of the parameter that precedes current parameter * @param isParamNameOptional Whether the param names in the signature is optional or not. 
* @return Parsed node */ private STNode parseParameter(SyntaxKind prevParamKind, boolean isParamNameOptional) { STNode annots; STToken nextToken = peek(); switch (nextToken.kind) { case AT_TOKEN: annots = parseOptionalAnnotations(); break; case IDENTIFIER_TOKEN: annots = STNodeFactory.createEmptyNodeList(); break; default: if (isTypeStartingToken(nextToken.kind)) { annots = STNodeFactory.createEmptyNodeList(); break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, isParamNameOptional); if (solution.action == Action.KEEP) { annots = STNodeFactory.createEmptyNodeList(); break; } return parseParameter(prevParamKind, isParamNameOptional); } STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER); STNode param = parseAfterParamType(prevParamKind, annots, type, isParamNameOptional); return param; } private STNode parseAfterParamType(SyntaxKind prevParamKind, STNode annots, STNode type, boolean isParamNameOptional) { STNode paramName; STToken token = peek(); switch (token.kind) { case ELLIPSIS_TOKEN: switchContext(ParserRuleContext.REST_PARAM); STNode ellipsis = parseEllipsis(); if (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { paramName = STNodeFactory.createEmptyNode(); } else { paramName = parseVariableName(); } return STNodeFactory.createRestParameterNode(annots, type, ellipsis, paramName); case IDENTIFIER_TOKEN: paramName = parseVariableName(); return parseParameterRhs(prevParamKind, annots, type, paramName); case EQUAL_TOKEN: if (!isParamNameOptional) { break; } paramName = STNodeFactory.createEmptyNode(); return parseParameterRhs(prevParamKind, annots, type, paramName); default: if (!isParamNameOptional) { break; } paramName = STNodeFactory.createEmptyNode(); return parseParameterRhs(prevParamKind, annots, type, paramName); } recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, annots, type, isParamNameOptional); return 
parseAfterParamType(prevParamKind, annots, type, isParamNameOptional); } /** * Parse ellipsis. * * @return Parsed node */ private STNode parseEllipsis() { STToken token = peek(); if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.ELLIPSIS); return parseEllipsis(); } } /** * <p> * Parse the right hand side of a required/defaultable parameter. * </p> * <code>parameter-rhs := [= expression]</code> * * @param prevParamKind Kind of the parameter that precedes current parameter * @param annots Annotations attached to the parameter * @param type Type descriptor * @param paramName Name of the parameter * @return Parsed parameter node */ private STNode parseParameterRhs(SyntaxKind prevParamKind, STNode annots, STNode type, STNode paramName) { STToken nextToken = peek(); if (isEndOfParameter(nextToken.kind)) { return STNodeFactory.createRequiredParameterNode(annots, type, paramName); } else if (nextToken.kind == SyntaxKind.EQUAL_TOKEN) { if (prevParamKind == SyntaxKind.REQUIRED_PARAM) { switchContext(ParserRuleContext.DEFAULTABLE_PARAM); } STNode equal = parseAssignOp(); STNode expr = parseExpression(); return STNodeFactory.createDefaultableParameterNode(annots, type, paramName, equal, expr); } else { recover(nextToken, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, annots, type, paramName); return parseParameterRhs(prevParamKind, annots, type, paramName); } } /** * Parse comma. * * @return Parsed node */ private STNode parseComma() { STToken token = peek(); if (token.kind == SyntaxKind.COMMA_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.COMMA); return parseComma(); } } /** * Parse return type descriptor of a function. A return type descriptor has the following structure. 
 *
 * <code>return-type-descriptor := [ returns annots type-descriptor ]</code>
 *
 * @return Parsed node, or an empty node when no return-type-descriptor is present
 */
private STNode parseFuncReturnTypeDescriptor() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACE_TOKEN: // function-body-block begins: no return type present
        case EQUAL_TOKEN: // external-function-body begins: no return type present
            return STNodeFactory.createEmptyNode();
        case RETURNS_KEYWORD:
            break;
        default:
            // NOTE(review): presumably this peeks one token further to spot a
            // 'returns' keyword hiding behind one unexpected token, so recovery
            // can still parse the descriptor — confirm getNextNextToken semantics.
            STToken nextNextToken = getNextNextToken(nextToken.kind);
            if (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {
                break;
            }
            // No 'returns' in sight: treat the descriptor as absent.
            return STNodeFactory.createEmptyNode();
    }

    STNode returnsKeyword = parseReturnsKeyword();
    STNode annot = parseOptionalAnnotations();
    STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}

/**
 * Parse 'returns' keyword.
 *
 * @return Return-keyword node
 */
private STNode parseReturnsKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RETURNS_KEYWORD) {
        return consume();
    } else {
        // Unexpected token: attempt error recovery, then retry.
        recover(token, ParserRuleContext.RETURNS_KEYWORD);
        return parseReturnsKeyword();
    }
}

/**
 * <p>
 * Parse a type descriptor. A type descriptor has the following structure.
* </p> * <code>type-descriptor := * &nbsp;simple-type-descriptor<br/> * &nbsp;| structured-type-descriptor<br/> * &nbsp;| behavioral-type-descriptor<br/> * &nbsp;| singleton-type-descriptor<br/> * &nbsp;| union-type-descriptor<br/> * &nbsp;| optional-type-descriptor<br/> * &nbsp;| any-type-descriptor<br/> * &nbsp;| anydata-type-descriptor<br/> * &nbsp;| byte-type-descriptor<br/> * &nbsp;| json-type-descriptor<br/> * &nbsp;| type-descriptor-reference<br/> * &nbsp;| ( type-descriptor ) * <br/> * type-descriptor-reference := qualified-identifier</code> * * @return Parsed node */ private STNode parseTypeDescriptor(ParserRuleContext context) { return parseTypeDescriptor(context, false, false); } private STNode parseTypeDescriptorInExpression(ParserRuleContext context, boolean isInConditionalExpr) { return parseTypeDescriptor(context, false, isInConditionalExpr); } private STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern, boolean isInConditionalExpr) { startContext(context); STNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern, isInConditionalExpr); endContext(); return typeDesc; } private STNode parseTypeDescriptorWithoutContext(ParserRuleContext context, boolean isInConditionalExpr) { return parseTypeDescriptorInternal(context, false, isInConditionalExpr); } private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern, boolean isInConditionalExpr) { STNode typeDesc = parseTypeDescriptorInternal(context, isInConditionalExpr); if (typeDesc.kind == SyntaxKind.VAR_TYPE_DESC && context != ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN) { STToken missingToken = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); missingToken = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(missingToken, typeDesc, DiagnosticErrorCode.ERROR_INVALID_USAGE_OF_VAR); typeDesc = STNodeFactory.createSimpleNameReferenceNode(missingToken); } return parseComplexTypeDescriptor(typeDesc, 
context, isTypedBindingPattern); } /** * This will handle the parsing of optional,array,union type desc to infinite length. * * @param typeDesc * @return Parsed type descriptor node */ private STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context, boolean isTypedBindingPattern) { STToken nextToken = peek(); switch (nextToken.kind) { case QUESTION_MARK_TOKEN: if (context == ParserRuleContext.TYPE_DESC_IN_EXPRESSION && !isValidTypeContinuationToken(getNextNextToken(nextToken.kind)) && isValidExprStart(getNextNextToken(nextToken.kind).kind)) { return typeDesc; } return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context, isTypedBindingPattern); case OPEN_BRACKET_TOKEN: if (isTypedBindingPattern) { return typeDesc; } return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern); case PIPE_TOKEN: return parseUnionTypeDescriptor(typeDesc, context, isTypedBindingPattern); case BITWISE_AND_TOKEN: return parseIntersectionTypeDescriptor(typeDesc, context, isTypedBindingPattern); default: return typeDesc; } } private boolean isValidTypeContinuationToken(STToken nextToken) { switch (nextToken.kind) { case QUESTION_MARK_TOKEN: case OPEN_BRACKET_TOKEN: case PIPE_TOKEN: case BITWISE_AND_TOKEN: return true; default: return false; } } private STNode validateForUsageOfVar(STNode typeDesc) { if (typeDesc.kind != SyntaxKind.VAR_TYPE_DESC) { return typeDesc; } STToken missingToken = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); missingToken = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(missingToken, typeDesc, DiagnosticErrorCode.ERROR_INVALID_USAGE_OF_VAR); return STNodeFactory.createSimpleNameReferenceNode(missingToken); } /** * <p> * Parse a type descriptor, given the next token kind. 
* </p> * If the preceding token is <code>?</code> then it is an optional type descriptor * * @param context Current context * @param isInConditionalExpr * @return Parsed node */ private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isInConditionalExpr) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseTypeReference(isInConditionalExpr); case RECORD_KEYWORD: return parseRecordTypeDescriptor(); case READONLY_KEYWORD: STToken nextNextToken = getNextNextToken(nextToken.kind); SyntaxKind nextNextTokenKind = nextNextToken.kind; if (nextNextTokenKind != SyntaxKind.OBJECT_KEYWORD && nextNextTokenKind != SyntaxKind.ABSTRACT_KEYWORD && nextNextTokenKind != SyntaxKind.CLIENT_KEYWORD) { return parseSimpleTypeDescriptor(); } case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: return parseObjectTypeDescriptor(); case OPEN_PAREN_TOKEN: return parseNilOrParenthesisedTypeDesc(); case MAP_KEYWORD: case FUTURE_KEYWORD: return parseParameterizedTypeDescriptor(); case TYPEDESC_KEYWORD: return parseTypedescTypeDescriptor(); case ERROR_KEYWORD: return parseErrorTypeDescriptor(); case XML_KEYWORD: return parseXmlTypeDescriptor(); case STREAM_KEYWORD: return parseStreamTypeDescriptor(); case TABLE_KEYWORD: return parseTableTypeDescriptor(); case FUNCTION_KEYWORD: return parseFunctionTypeDesc(); case OPEN_BRACKET_TOKEN: return parseTupleTypeDesc(); case DISTINCT_KEYWORD: return parseDistinctTypeDesc(context); default: if (isSingletonTypeDescStart(nextToken.kind, true)) { return parseSingletonTypeDesc(); } if (isSimpleType(nextToken.kind)) { return parseSimpleTypeDescriptor(); } Solution solution = recover(nextToken, ParserRuleContext.TYPE_DESCRIPTOR, context, isInConditionalExpr); if (solution.action == Action.KEEP) { return parseSingletonTypeDesc(); } return parseTypeDescriptorInternal(context, isInConditionalExpr); } } /** * Parse distinct type descriptor. 
* <p> * <code> * distinct-type-descriptor := distinct type-descriptor * </code> * * @param context Context in which the type desc is used. * @return Distinct type descriptor */ private STNode parseDistinctTypeDesc(ParserRuleContext context) { STNode distinctKeyword = parseDistinctKeyword(); STNode typeDesc = parseTypeDescriptor(context); return STNodeFactory.createDistinctTypeDescriptorNode(distinctKeyword, typeDesc); } private STNode parseDistinctKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.DISTINCT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.DISTINCT_KEYWORD); return parseDistinctKeyword(); } } private STNode parseNilOrParenthesisedTypeDesc() { STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); return parseNilOrParenthesisedTypeDescRhs(openParen); } private STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) { STNode closeParen; STToken nextToken = peek(); switch (nextToken.kind) { case CLOSE_PAREN_TOKEN: closeParen = parseCloseParenthesis(); return STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen); default: if (isTypeStartingToken(nextToken.kind)) { STNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS); closeParen = parseCloseParenthesis(); return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen); } recover(peek(), ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen); return parseNilOrParenthesisedTypeDescRhs(openParen); } } /** * Parse simple type descriptor. 
 *
 * @return Parsed node
 */
private STNode parseSimpleTypeDescriptor() {
    STToken nextToken = peek();
    if (isSimpleType(nextToken.kind)) {
        STToken token = consume();
        return createBuiltinSimpleNameReference(token);
    } else {
        // Not a simple-type keyword: attempt error recovery, then retry.
        recover(nextToken, ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);
        return parseSimpleTypeDescriptor();
    }
}

/**
 * Create a built-in simple name reference node for the given type-keyword token.
 *
 * @param token Token representing a built-in simple type
 * @return Built-in simple name reference node
 */
private STNode createBuiltinSimpleNameReference(STNode token) {
    SyntaxKind typeKind = getTypeSyntaxKind(token.kind);
    return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);
}

/**
 * <p>
 * Parse function body. A function body has the following structure.
 * </p>
 * <code>
 * function-body := function-body-block | external-function-body
 * external-function-body := = annots external ;
 * function-body-block := { [default-worker-init named-worker-decl+] default-worker }
 * </code>
 *
 * @param isObjectMethod Flag indicating whether this is an object-method
 * @return Parsed node
 */
protected STNode parseFunctionBody(boolean isObjectMethod) {
    switch (peek().kind) {
        case EQUAL_TOKEN:
            return parseExternalFunctionBody();
        case OPEN_BRACE_TOKEN:
            return parseFunctionBodyBlock(false);
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return parseExpressionFuncBody(false, false);
        case SEMICOLON_TOKEN:
            if (isObjectMethod) {
                return parseSemicolon();
            }
            // fall through: a bare ';' body is only valid for an object method
            // declaration, so for a plain function it goes to recovery below
        default:
            STToken token = peek();
            recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);
            return parseFunctionBody(isObjectMethod);
    }
}

/**
 * <p>
 * Parse function body block. A function body block has the following structure.
* </p> * * <code> * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/> * default-worker-init := sequence-stmt<br/> * default-worker := sequence-stmt<br/> * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/> * worker-name := identifier<br/> * </code> * * @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function * @return Parsed node */ private STNode parseFunctionBodyBlock(boolean isAnonFunc) { startContext(ParserRuleContext.FUNC_BODY_BLOCK); STNode openBrace = parseOpenBrace(); STToken token = peek(); ArrayList<STNode> firstStmtList = new ArrayList<>(); ArrayList<STNode> workers = new ArrayList<>(); ArrayList<STNode> secondStmtList = new ArrayList<>(); ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT; boolean hasNamedWorkers = false; while (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (currentCtx) { case DEFAULT_WORKER_INIT: if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) { firstStmtList.add(stmt); break; } currentCtx = ParserRuleContext.NAMED_WORKERS; hasNamedWorkers = true; case NAMED_WORKERS: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { workers.add(stmt); break; } currentCtx = ParserRuleContext.DEFAULT_WORKER; case DEFAULT_WORKER: default: if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { updateLastNodeInListWithInvalidNode(secondStmtList, stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE); break; } secondStmtList.add(stmt); break; } token = peek(); } STNode namedWorkersList; STNode statements; if (hasNamedWorkers) { STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList); STNode namedWorkers = STNodeFactory.createNodeList(workers); namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers); statements = STNodeFactory.createNodeList(secondStmtList); } else { namedWorkersList = 
STNodeFactory.createEmptyNode(); statements = STNodeFactory.createNodeList(firstStmtList); } STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace); } private boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) { if (isAnonFunc) { switch (nextTokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case EOF_TOKEN: case EQUAL_TOKEN: case BACKTICK_TOKEN: return true; default: break; } } return isEndOfStatements(); } private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case TYPE_KEYWORD: case PUBLIC_KEYWORD: default: return endOfModuleLevelNode(1); } } private boolean isEndOfObjectTypeNode() { return endOfModuleLevelNode(1, true); } private boolean isEndOfStatements() { switch (peek().kind) { case RESOURCE_KEYWORD: return true; default: return endOfModuleLevelNode(1); } } private boolean endOfModuleLevelNode(int peekIndex) { return endOfModuleLevelNode(peekIndex, false); } private boolean endOfModuleLevelNode(int peekIndex, boolean isObject) { switch (peek(peekIndex).kind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: case IMPORT_KEYWORD: case CONST_KEYWORD: case ANNOTATION_KEYWORD: case LISTENER_KEYWORD: return true; case SERVICE_KEYWORD: return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1); case PUBLIC_KEYWORD: return endOfModuleLevelNode(peekIndex + 1, isObject); case FUNCTION_KEYWORD: if (isObject) { return false; } return peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN; default: return false; } } /** * Check whether the given token is an end of a parameter. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter. 
<code>false</code> otherwise */ private boolean isEndOfParameter(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case AT_TOKEN: return true; default: return endOfModuleLevelNode(1); } } /** * Check whether the given token is an end of a parameter-list. * * @param tokenKind Next token kind * @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise */ private boolean isEndOfParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_PAREN_TOKEN: case SEMICOLON_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: return true; default: return endOfModuleLevelNode(1); } } /** * Parse type reference or variable reference. * * @return Parsed node */ private STNode parseStatementStartIdentifier() { return parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME); } /** * Parse variable name. * * @return Parsed node */ private STNode parseVariableName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.VARIABLE_NAME); return parseVariableName(); } } /** * Parse open brace. * * @return Parsed node */ private STNode parseOpenBrace() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.OPEN_BRACE); return parseOpenBrace(); } } /** * Parse close brace. * * @return Parsed node */ private STNode parseCloseBrace() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSE_BRACE); return parseCloseBrace(); } } /** * <p> * Parse external function body. An external function body has the following structure. 
* </p> * <code> * external-function-body := = annots external ; * </code> * * @return Parsed node */ private STNode parseExternalFunctionBody() { startContext(ParserRuleContext.EXTERNAL_FUNC_BODY); STNode assign = parseAssignOp(); return parseExternalFuncBodyRhs(assign); } private STNode parseExternalFuncBodyRhs(STNode assign) { STNode annotation; STToken nextToken = peek(); switch (nextToken.kind) { case AT_TOKEN: annotation = parseAnnotations(); break; case EXTERNAL_KEYWORD: annotation = STNodeFactory.createEmptyNodeList(); break; default: recover(nextToken, ParserRuleContext.EXTERNAL_FUNC_BODY_OPTIONAL_ANNOTS, assign); return parseExternalFuncBodyRhs(assign); } STNode externalKeyword = parseExternalKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon); } /** * Parse semicolon. * * @return Parsed node */ private STNode parseSemicolon() { STToken token = peek(); if (token.kind == SyntaxKind.SEMICOLON_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.SEMICOLON); return parseSemicolon(); } } /** * Parse <code>external</code> keyword. * * @return Parsed node */ private STNode parseExternalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.EXTERNAL_KEYWORD); return parseExternalKeyword(); } } /* * Operators */ /** * Parse assign operator. * * @return Parsed node */ private STNode parseAssignOp() { STToken token = peek(); if (token.kind == SyntaxKind.EQUAL_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.ASSIGN_OP); return parseAssignOp(); } } /** * Parse binary operator. 
* * @return Parsed node */ private STNode parseBinaryOperator() { STToken token = peek(); if (isBinaryOperator(token.kind)) { return consume(); } else { recover(token, ParserRuleContext.BINARY_OPERATOR); return parseBinaryOperator(); } } /** * Check whether the given token kind is a binary operator. * * @param kind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isBinaryOperator(SyntaxKind kind) { switch (kind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case GT_TOKEN: case LT_TOKEN: case DOUBLE_EQUAL_TOKEN: case TRIPPLE_EQUAL_TOKEN: case LT_EQUAL_TOKEN: case GT_EQUAL_TOKEN: case NOT_EQUAL_TOKEN: case NOT_DOUBLE_EQUAL_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case LOGICAL_AND_TOKEN: case LOGICAL_OR_TOKEN: case PERCENT_TOKEN: case DOUBLE_LT_TOKEN: case DOUBLE_GT_TOKEN: case TRIPPLE_GT_TOKEN: case ELLIPSIS_TOKEN: case DOUBLE_DOT_LT_TOKEN: case ELVIS_TOKEN: case EQUALS_KEYWORD: return true; default: return false; } } /** * Get the precedence of a given operator. 
 *
 * @param binaryOpKind Operator kind
 * @return Precedence of the given operator
 */
private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {
    switch (binaryOpKind) {
        // multiplicative: * / %
        case ASTERISK_TOKEN:
        case SLASH_TOKEN:
        case PERCENT_TOKEN:
            return OperatorPrecedence.MULTIPLICATIVE;
        // additive: + -
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            return OperatorPrecedence.ADDITIVE;
        // relational comparison, including the `is` type-test
        case GT_TOKEN:
        case LT_TOKEN:
        case GT_EQUAL_TOKEN:
        case LT_EQUAL_TOKEN:
        case IS_KEYWORD:
            return OperatorPrecedence.BINARY_COMPARE;
        // member access: field/index/call access and the XML navigation tokens
        case DOT_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_PAREN_TOKEN:
        case ANNOT_CHAINING_TOKEN:
        case OPTIONAL_CHAINING_TOKEN:
        case DOT_LT_TOKEN:
        case SLASH_LT_TOKEN:
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
        case SLASH_ASTERISK_TOKEN:
            return OperatorPrecedence.MEMBER_ACCESS;
        // equality: == === != !== equals
        case DOUBLE_EQUAL_TOKEN:
        case TRIPPLE_EQUAL_TOKEN:
        case NOT_EQUAL_TOKEN:
        case NOT_DOUBLE_EQUAL_TOKEN:
        case EQUALS_KEYWORD:
            return OperatorPrecedence.EQUALITY;
        case BITWISE_AND_TOKEN:
            return OperatorPrecedence.BITWISE_AND;
        case BITWISE_XOR_TOKEN:
            return OperatorPrecedence.BITWISE_XOR;
        case PIPE_TOKEN:
            return OperatorPrecedence.BITWISE_OR;
        case LOGICAL_AND_TOKEN:
            return OperatorPrecedence.LOGICAL_AND;
        case LOGICAL_OR_TOKEN:
            return OperatorPrecedence.LOGICAL_OR;
        // `->` remote method call action
        case RIGHT_ARROW_TOKEN:
            return OperatorPrecedence.REMOTE_CALL_ACTION;
        // `=>` anonymous function / let expression
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return OperatorPrecedence.ANON_FUNC_OR_LET;
        // `->>` sync send action
        case SYNC_SEND_TOKEN:
            return OperatorPrecedence.ACTION;
        // shift: << >> >>>
        case DOUBLE_LT_TOKEN:
        case DOUBLE_GT_TOKEN:
        case TRIPPLE_GT_TOKEN:
            return OperatorPrecedence.SHIFT;
        // range: ... ..<
        case ELLIPSIS_TOKEN:
        case DOUBLE_DOT_LT_TOKEN:
            return OperatorPrecedence.RANGE;
        case ELVIS_TOKEN:
            return OperatorPrecedence.ELVIS_CONDITIONAL;
        // `? :` conditional expression
        case QUESTION_MARK_TOKEN:
        case COLON_TOKEN:
            return OperatorPrecedence.CONDITIONAL;
        default:
            throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'");
    }
}

/**
 * <p>
 * Get the operator kind to insert during recovery, given the precedence level.
* </p> * * @param opPrecedenceLevel Precedence of the given operator * @return Kind of the operator to insert */ private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) { switch (opPrecedenceLevel) { case MULTIPLICATIVE: return SyntaxKind.ASTERISK_TOKEN; case DEFAULT: case UNARY: case ACTION: case EXPRESSION_ACTION: case REMOTE_CALL_ACTION: case ANON_FUNC_OR_LET: case QUERY: case ADDITIVE: return SyntaxKind.PLUS_TOKEN; case SHIFT: return SyntaxKind.DOUBLE_LT_TOKEN; case RANGE: return SyntaxKind.ELLIPSIS_TOKEN; case BINARY_COMPARE: return SyntaxKind.LT_TOKEN; case EQUALITY: return SyntaxKind.DOUBLE_EQUAL_TOKEN; case BITWISE_AND: return SyntaxKind.BITWISE_AND_TOKEN; case BITWISE_XOR: return SyntaxKind.BITWISE_XOR_TOKEN; case BITWISE_OR: return SyntaxKind.PIPE_TOKEN; case LOGICAL_AND: return SyntaxKind.LOGICAL_AND_TOKEN; case LOGICAL_OR: return SyntaxKind.LOGICAL_OR_TOKEN; case ELVIS_CONDITIONAL: return SyntaxKind.ELVIS_TOKEN; default: throw new UnsupportedOperationException( "Unsupported operator precedence level'" + opPrecedenceLevel + "'"); } } /** * <p> * Parse a module type definition. * </p> * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code> * * @param metadata Metadata * @param qualifier Visibility qualifier * @return Parsed node */ private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_TYPE_DEFINITION); STNode typeKeyword = parseTypeKeyword(); STNode typeName = parseTypeName(); STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor, semicolon); } /** * Parse type keyword. 
* * @return Parsed node */ private STNode parseTypeKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TYPE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TYPE_KEYWORD); return parseTypeKeyword(); } } /** * Parse type name. * * @return Parsed node */ private STNode parseTypeName() { STToken token = peek(); if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.TYPE_NAME); return parseTypeName(); } } /** * <p> * Parse record type descriptor. A record type descriptor body has the following structure. * </p> * * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* } * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |} * </code> * * @return Parsed node */ private STNode parseRecordTypeDescriptor() { startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR); STNode recordKeyword = parseRecordKeyword(); STNode bodyStartDelimiter = parseRecordBodyStartDelimiter(); boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN; ArrayList<STNode> recordFields = new ArrayList<>(); STToken token = peek(); STNode recordRestDescriptor = null; while (!isEndOfRecordTypeNode(token.kind)) { STNode field = parseFieldOrRestDescriptor(isInclusive); if (field == null) { break; } token = peek(); if (field.kind == SyntaxKind.RECORD_REST_TYPE) { recordRestDescriptor = field; break; } recordFields.add(field); } while (recordRestDescriptor != null && !isEndOfRecordTypeNode(token.kind)) { STNode invalidField = parseFieldOrRestDescriptor(isInclusive); recordRestDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(recordRestDescriptor, invalidField, DiagnosticErrorCode.ERROR_MORE_RECORD_FIELDS_AFTER_REST_FIELD); token = peek(); } STNode fields = STNodeFactory.createNodeList(recordFields); STNode bodyEndDelimiter = 
parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind); endContext(); return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields, recordRestDescriptor, bodyEndDelimiter); } /** * Parse record body start delimiter. * * @return Parsed node */ private STNode parseRecordBodyStartDelimiter() { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACE_PIPE_TOKEN: return parseClosedRecordBodyStart(); case OPEN_BRACE_TOKEN: return parseOpenBrace(); default: recover(nextToken, ParserRuleContext.RECORD_BODY_START); return parseRecordBodyStartDelimiter(); } } /** * Parse closed-record body start delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyStart() { STToken token = peek(); if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START); return parseClosedRecordBodyStart(); } } /** * Parse record body close delimiter. * * @return Parsed node */ private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) { if (startingDelimeter == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) { return parseClosedRecordBodyEnd(); } return parseCloseBrace(); } /** * Parse closed-record body end delimiter. * * @return Parsed node */ private STNode parseClosedRecordBodyEnd() { STToken token = peek(); if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END); return parseClosedRecordBodyEnd(); } } /** * Parse record keyword. * * @return Parsed node */ private STNode parseRecordKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RECORD_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RECORD_KEYWORD); return parseRecordKeyword(); } } /** * <p> * Parse field descriptor or rest descriptor. 
* </p> * * <code> * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ; * <br/><br/>field-name := identifier * <br/><br/>default-value := = expression * <br/><br/>record-type-reference := * type-reference ; * <br/><br/>record-rest-descriptor := type-descriptor ... ; * </code> * * @return Parsed node */ private STNode parseFieldOrRestDescriptor(boolean isInclusive) { STToken nextToken = peek(); switch (nextToken.kind) { case CLOSE_BRACE_TOKEN: case CLOSE_BRACE_PIPE_TOKEN: return null; case ASTERISK_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode asterisk = consume(); STNode type = parseTypeReference(); STNode semicolonToken = parseSemicolon(); endContext(); return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken); case DOCUMENTATION_STRING: case AT_TOKEN: startContext(ParserRuleContext.RECORD_FIELD); STNode metadata = parseMetaData(); nextToken = peek(); return parseRecordField(nextToken, isInclusive, metadata); default: if (isTypeStartingToken(nextToken.kind)) { startContext(ParserRuleContext.RECORD_FIELD); metadata = STNodeFactory.createEmptyNode(); return parseRecordField(nextToken, isInclusive, metadata); } recover(peek(), ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive); return parseFieldOrRestDescriptor(isInclusive); } } private STNode parseRecordField(STToken nextToken, boolean isInclusive, STNode metadata) { if (nextToken.kind != SyntaxKind.READONLY_KEYWORD) { STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD); STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type); endContext(); return fieldOrRestDesc; } STNode type; STNode fieldOrRestDesc; STNode readOnlyQualifier; readOnlyQualifier = parseReadonlyKeyword(); nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { STNode fieldNameOrTypeDesc = 
parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME); if (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { type = fieldNameOrTypeDesc; } else { nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: case EQUAL_TOKEN: type = createBuiltinSimpleNameReference(readOnlyQualifier); readOnlyQualifier = STNodeFactory.createEmptyNode(); STNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name; return parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName); default: type = parseComplexTypeDescriptor(fieldNameOrTypeDesc, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false); break; } } } else if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) { type = createBuiltinSimpleNameReference(readOnlyQualifier); fieldOrRestDesc = parseFieldDescriptor(isInclusive, metadata, type); endContext(); return fieldOrRestDesc; } else if (isTypeStartingToken(nextToken.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD); } else { readOnlyQualifier = createBuiltinSimpleNameReference(readOnlyQualifier); type = parseComplexTypeDescriptor(readOnlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false); readOnlyQualifier = STNodeFactory.createEmptyNode(); } fieldOrRestDesc = parseIndividualRecordField(metadata, readOnlyQualifier, type); endContext(); return fieldOrRestDesc; } private STNode parseFieldDescriptor(boolean isInclusive, STNode metadata, STNode type) { if (isInclusive) { STNode readOnlyQualifier = STNodeFactory.createEmptyNode(); return parseIndividualRecordField(metadata, readOnlyQualifier, type); } else { return parseFieldOrRestDescriptorRhs(metadata, type); } } private STNode parseIndividualRecordField(STNode metadata, STNode readOnlyQualifier, STNode type) { STNode fieldName = parseVariableName(); return parseFieldDescriptorRhs(metadata, readOnlyQualifier, type, fieldName); } /** * Parse type reference. 
* <code>type-reference := identifier | qualified-identifier</code> * * @return Type reference node */ private STNode parseTypeReference() { STNode typeReference = parseTypeDescriptor(ParserRuleContext.TYPE_REFERENCE); if (typeReference.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { if (typeReference.hasDiagnostics()) { STNode emptyNameReference = STNodeFactory.createSimpleNameReferenceNode (SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER)); return emptyNameReference; } return typeReference; } if (typeReference.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { return typeReference; } STNode emptyNameReference = STNodeFactory .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); emptyNameReference = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(emptyNameReference, typeReference, DiagnosticErrorCode.ONLY_TYPE_REFERENCE_ALLOWED_HERE_AS_TYPE_INCLUSIONS); return emptyNameReference; } private STNode parseTypeReference(boolean isInConditionalExpr) { return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE, isInConditionalExpr); } /** * Parse identifier or qualified identifier. 
* * @return Identifier node */ private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) { return parseQualifiedIdentifier(currentCtx, false); } private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx, boolean isInConditionalExpr) { STToken token = peek(); STNode typeRefOrPkgRef; if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) { typeRefOrPkgRef = consume(); } else { recover(token, currentCtx, isInConditionalExpr); if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) { addInvalidTokenToNextToken(errorHandler.consumeInvalidToken()); return parseQualifiedIdentifier(currentCtx, isInConditionalExpr); } typeRefOrPkgRef = consume(); } return parseQualifiedIdentifier(typeRefOrPkgRef, isInConditionalExpr); } /** * Parse identifier or qualified identifier, given the starting identifier. * * @param identifier Starting identifier * @return Parse node */ private STNode parseQualifiedIdentifier(STNode identifier, boolean isInConditionalExpr) { STToken nextToken = peek(1); if (nextToken.kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } STToken nextNextToken = peek(2); switch (nextNextToken.kind) { case IDENTIFIER_TOKEN: STToken colon = consume(); STNode varOrFuncName = consume(); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName); case MAP_KEYWORD: colon = consume(); STToken mapKeyword = consume(); STNode refName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(), mapKeyword.trailingMinutiae(), mapKeyword.diagnostics()); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, refName); case COLON_TOKEN: addInvalidTokenToNextToken(errorHandler.consumeInvalidToken()); return parseQualifiedIdentifier(identifier, isInConditionalExpr); default: if (isInConditionalExpr) { return STNodeFactory.createSimpleNameReferenceNode(identifier); } colon = consume(); varOrFuncName = 
SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER); return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName); } } /** * Parse RHS of a field or rest type descriptor. * * @param metadata Metadata * @param type Type descriptor * @return Parsed node */ private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) { STToken nextToken = peek(); switch (nextToken.kind) { case ELLIPSIS_TOKEN: STNode ellipsis = parseEllipsis(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken); case IDENTIFIER_TOKEN: STNode readonlyQualifier = STNodeFactory.createEmptyNode(); return parseIndividualRecordField(metadata, readonlyQualifier, type); default: recover(nextToken, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type); return parseFieldOrRestDescriptorRhs(metadata, type); } } /** * <p> * Parse field descriptor rhs. * </p> * * <code> * field-descriptor := [? 
| default-value] ; * <br/>default-value := = expression * </code> * * @param metadata Metadata * @param type Type descriptor * @param fieldName Field name * @return Parsed node */ private STNode parseFieldDescriptorRhs(STNode metadata, STNode readonlyQualifier, STNode type, STNode fieldName) { STToken nextToken = peek(); switch (nextToken.kind) { case SEMICOLON_TOKEN: STNode questionMarkToken = STNodeFactory.createEmptyNode(); STNode semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName, questionMarkToken, semicolonToken); case QUESTION_MARK_TOKEN: questionMarkToken = parseQuestionMark(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldNode(metadata, readonlyQualifier, type, fieldName, questionMarkToken, semicolonToken); case EQUAL_TOKEN: STNode equalsToken = parseAssignOp(); STNode expression = parseExpression(); semicolonToken = parseSemicolon(); return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, readonlyQualifier, type, fieldName, equalsToken, expression, semicolonToken); default: recover(nextToken, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, readonlyQualifier, type, fieldName); return parseFieldDescriptorRhs(metadata, readonlyQualifier, type, fieldName); } } /** * Parse question mark. * * @return Parsed node */ private STNode parseQuestionMark() { STToken token = peek(); if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.QUESTION_MARK); return parseQuestionMark(); } } /* * Statements */ /** * Parse statements, until an end of a block is reached. 
* * @return Parsed node */ private STNode parseStatements() { ArrayList<STNode> stmts = new ArrayList<>(); return parseStatements(stmts); } private STNode parseStatements(ArrayList<STNode> stmts) { while (!isEndOfStatements()) { STNode stmt = parseStatement(); if (stmt == null) { break; } if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) { addInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_NAMED_WORKER_NOT_ALLOWED_HERE); break; } stmts.add(stmt); } return STNodeFactory.createNodeList(stmts); } /** * Parse a single statement. * * @return Parsed node */ protected STNode parseStatement() { STToken nextToken = peek(); STNode annots = null; switch (nextToken.kind) { case CLOSE_BRACE_TOKEN: return null; case SEMICOLON_TOKEN: addInvalidTokenToNextToken(errorHandler.consumeInvalidToken()); return parseStatement(); case AT_TOKEN: annots = parseOptionalAnnotations(); break; case FINAL_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case PANIC_KEYWORD: case CONTINUE_KEYWORD: case BREAK_KEYWORD: case RETURN_KEYWORD: case TYPE_KEYWORD: case LOCK_KEYWORD: case OPEN_BRACE_TOKEN: case FORK_KEYWORD: case FOREACH_KEYWORD: case XMLNS_KEYWORD: case TRANSACTION_KEYWORD: case RETRY_KEYWORD: case ROLLBACK_KEYWORD: case MATCH_KEYWORD: case ON_KEYWORD: case FAIL_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TRAP_KEYWORD: case START_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case COMMIT_KEYWORD: case WORKER_KEYWORD: break; default: if (isTypeStartingToken(nextToken.kind)) { break; } if (isValidExpressionStart(nextToken.kind, 1)) { break; } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT); if (solution.action == Action.KEEP) { break; } return parseStatement(); } return parseStatement(annots); } private STNode getAnnotations(STNode nullbaleAnnot) { if (nullbaleAnnot != null) { return nullbaleAnnot; } return STNodeFactory.createEmptyNodeList(); } /** * Parse a single statement, given the 
next token kind. * * @param tokenKind Next token kind * @return Parsed node */ private STNode parseStatement(STNode annots) { STToken nextToken = peek(); switch (nextToken.kind) { case CLOSE_BRACE_TOKEN: addInvalidNodeToNextToken(annots, DiagnosticErrorCode.ERROR_INVALID_ANNOTATIONS); return null; case SEMICOLON_TOKEN: addInvalidTokenToNextToken(errorHandler.consumeInvalidToken()); return parseStatement(annots); case FINAL_KEYWORD: STNode finalKeyword = parseFinalKeyword(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); case IF_KEYWORD: return parseIfElseBlock(); case WHILE_KEYWORD: return parseWhileStatement(); case DO_KEYWORD: return parseDoStatement(); case PANIC_KEYWORD: return parsePanicStatement(); case CONTINUE_KEYWORD: return parseContinueStatement(); case BREAK_KEYWORD: return parseBreakStatement(); case RETURN_KEYWORD: return parseReturnStatement(); case FAIL_KEYWORD: return parseFailStatement(); case TYPE_KEYWORD: return parseLocalTypeDefinitionStatement(getAnnotations(annots)); case LOCK_KEYWORD: return parseLockStatement(); case OPEN_BRACE_TOKEN: return parseStatementStartsWithOpenBrace(); case WORKER_KEYWORD: return parseNamedWorkerDeclaration(getAnnotations(annots)); case FORK_KEYWORD: return parseForkStatement(); case FOREACH_KEYWORD: return parseForEachStatement(); case START_KEYWORD: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case TRAP_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case FROM_KEYWORD: case COMMIT_KEYWORD: return parseExpressionStatement(getAnnotations(annots)); case XMLNS_KEYWORD: return parseXMLNamespaceDeclaration(false); case TRANSACTION_KEYWORD: return parseTransactionStatement(); case RETRY_KEYWORD: return parseRetryStatement(); case ROLLBACK_KEYWORD: return parseRollbackStatement(); case OPEN_BRACKET_TOKEN: return parseStatementStartsWithOpenBracket(getAnnotations(annots), false); case FUNCTION_KEYWORD: case OPEN_PAREN_TOKEN: case IDENTIFIER_TOKEN: case 
DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case STRING_KEYWORD: case XML_KEYWORD: return parseStmtStartsWithTypeOrExpr(getAnnotations(annots)); case MATCH_KEYWORD: return parseMatchStatement(); case ERROR_KEYWORD: return parseErrorTypeDescOrErrorBP(getAnnotations(annots)); default: if (isValidExpressionStart(nextToken.kind, 1)) { return parseStatementStartWithExpr(getAnnotations(annots)); } if (isTypeStartingToken(nextToken.kind)) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } STToken token = peek(); Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots); if (solution.action == Action.KEEP) { finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } return parseStatement(annots); } } /** * <p> * Parse variable declaration. Variable declaration can be a local or module level. * </p> * * <code> * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt * <br/><br/> * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ; * <br/><br/> * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ; * </code> * * @param annots Annotations or metadata * @param finalKeyword Final keyword * @return Parsed node */ private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode typeBindingPattern = parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, isModuleVar); } /** * Parse final keyword. 
* * @return Parsed node */ private STNode parseFinalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FINAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FINAL_KEYWORD); return parseFinalKeyword(); } } /** * <p> * Parse the right hand side of a variable declaration statement. * </p> * <code> * var-decl-rhs := ; | = action-or-expr ; * </code> * * @param metadata metadata * @param finalKeyword Final keyword * @param typedBindingPattern Typed binding pattern * @return Parsed node */ private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode typedBindingPattern, boolean isModuleVar) { STNode assign; STNode expr; STNode semicolon; STToken nextToken = peek(); switch (nextToken.kind) { case EQUAL_TOKEN: assign = parseAssignOp(); if (isModuleVar) { expr = parseExpression(); } else { expr = parseActionOrExpression(); } semicolon = parseSemicolon(); break; case SEMICOLON_TOKEN: assign = STNodeFactory.createEmptyNode(); expr = STNodeFactory.createEmptyNode(); semicolon = parseSemicolon(); break; default: recover(nextToken, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, typedBindingPattern, isModuleVar); return parseVarDeclRhs(metadata, finalKeyword, typedBindingPattern, isModuleVar); } endContext(); if (isModuleVar) { return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr, semicolon); } assert metadata.kind == SyntaxKind.LIST; return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, typedBindingPattern, assign, expr, semicolon); } /** * <p> * Parse the RHS portion of the assignment. 
* </p> * <code>assignment-stmt-rhs := = action-or-expr ;</code> * * @param lvExpr LHS expression * @return Parsed node */ private STNode parseAssignmentStmtRhs(STNode lvExpr) { STNode assign = parseAssignOp(); STNode expr = parseActionOrExpression(); STNode semicolon = parseSemicolon(); endContext(); if (lvExpr.kind == SyntaxKind.FUNCTION_CALL && isPossibleErrorBindingPattern((STFunctionCallExpressionNode) lvExpr)) { lvExpr = getBindingPattern(lvExpr); } boolean lvExprValid = isValidLVExpr(lvExpr); if (!lvExprValid) { STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN); STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier); lvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr, DiagnosticErrorCode.ERROR_INVALID_EXPR_IN_ASSIGNMENT_LHS); } return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon); } /* * Expressions */ /** * Parse expression. This will start parsing expressions from the lowest level of precedence. * * @return Parsed node */ protected STNode parseExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, false); } /** * Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence. * * @return Parsed node */ private STNode parseActionOrExpression() { return parseExpression(DEFAULT_OP_PRECEDENCE, true, true); } private STNode parseActionOrExpressionInLhs(STNode annots) { return parseExpression(DEFAULT_OP_PRECEDENCE, annots, false, true, false); } /** * Parse expression. 
* * @param isRhsExpr Flag indicating whether this is a rhs expression * @return Parsed node */ private STNode parseExpression(boolean isRhsExpr) { return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false); } private boolean isValidLVExpr(STNode expression) { switch (expression.kind) { case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: case LIST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case ERROR_BINDING_PATTERN: return true; case FIELD_ACCESS: return isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression); case INDEXED_EXPRESSION: return isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression); default: return (expression instanceof STMissingToken); } } private boolean isValidLVMemberExpr(STNode expression) { switch (expression.kind) { case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: return true; case FIELD_ACCESS: return isValidLVMemberExpr(((STFieldAccessExpressionNode) expression).expression); case INDEXED_EXPRESSION: return isValidLVMemberExpr(((STIndexedExpressionNode) expression).containerExpression); case BRACED_EXPRESSION: return isValidLVMemberExpr(((STBracedExpressionNode) expression).expression); default: return (expression instanceof STMissingToken); } } /** * Parse an expression that has an equal or higher precedence than a given level. 
* * @param precedenceLevel Precedence level of expression to be parsed * @param isRhsExpr Flag indicating whether this is a rhs expression * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) { return parseExpression(precedenceLevel, isRhsExpr, allowActions, false); } private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { return parseExpression(precedenceLevel, isRhsExpr, allowActions, false, isInConditionalExpr); } private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { STNode expr = parseTerminalExpression(isRhsExpr, allowActions, isInConditionalExpr); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } private STNode attachErrorExpectedActionFoundDiagnostic(STNode node) { return SyntaxErrors.addDiagnostic(node, DiagnosticErrorCode.ERROR_EXPRESSION_EXPECTED_ACTION_FOUND); } private STNode parseExpression(OperatorPrecedence precedenceLevel, STNode annots, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode expr = parseTerminalExpression(annots, isRhsExpr, allowActions, isInConditionalExpr); return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions, false, isInConditionalExpr); } private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode annots; if (peek().kind == SyntaxKind.AT_TOKEN) { annots = parseOptionalAnnotations(); } else { annots = STNodeFactory.createEmptyNodeList(); } STNode expr = parseTerminalExpression(annots, isRhsExpr, allowActions, isInConditionalExpr); if (!isNodeListEmpty(annots) && expr.kind != SyntaxKind.START_ACTION) { expr = 
SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(expr, annots, DiagnosticErrorCode.ERROR_ANNOTATIONS_ATTACHED_TO_EXPRESSION); } return expr; } /** * Parse terminal expressions. A terminal expression has the highest precedence level * out of all expressions, and will be at the leaves of an expression tree. * * @param annots Annotations * @param isRhsExpr Is a rhs expression * @param allowActions Allow actions * @return Parsed node */ private STNode parseTerminalExpression(STNode annots, boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STToken nextToken = peek(); switch (nextToken.kind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: return parseBasicLiteral(); case IDENTIFIER_TOKEN: return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF, isInConditionalExpr); case OPEN_PAREN_TOKEN: return parseBracedExpression(isRhsExpr, allowActions); case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: return parseCheckExpression(isRhsExpr, allowActions, isInConditionalExpr); case OPEN_BRACE_TOKEN: return parseMappingConstructorExpr(); case TYPEOF_KEYWORD: return parseTypeofExpression(isRhsExpr, isInConditionalExpr); case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: return parseUnaryExpression(isRhsExpr, isInConditionalExpr); case TRAP_KEYWORD: return parseTrapExpression(isRhsExpr, allowActions, isInConditionalExpr); case OPEN_BRACKET_TOKEN: return parseListConstructorExpr(); case LT_TOKEN: return parseTypeCastExpr(isRhsExpr, allowActions, isInConditionalExpr); case TABLE_KEYWORD: case STREAM_KEYWORD: case FROM_KEYWORD: return parseTableConstructorOrQuery(isRhsExpr); case ERROR_KEYWORD: if (peek(2).kind == SyntaxKind.IDENTIFIER_TOKEN) { return parseErrorBindingPattern(); } return parseErrorConstructorExpr(); case LET_KEYWORD: return 
parseLetExpression(isRhsExpr); case BACKTICK_TOKEN: return parseTemplateExpression(); case XML_KEYWORD: STToken nextNextToken = getNextNextToken(nextToken.kind); if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) { return parseXMLTemplateExpression(); } return parseSimpleTypeDescriptor(); case STRING_KEYWORD: nextNextToken = getNextNextToken(nextToken.kind); if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) { return parseStringTemplateExpression(); } return parseSimpleTypeDescriptor(); case FUNCTION_KEYWORD: return parseExplicitFunctionExpression(annots, isRhsExpr); case AT_TOKEN: break; case NEW_KEYWORD: return parseNewExpression(); case START_KEYWORD: return parseStartAction(annots); case FLUSH_KEYWORD: return parseFlushAction(); case LEFT_ARROW_TOKEN: return parseReceiveAction(); case WAIT_KEYWORD: return parseWaitAction(); case COMMIT_KEYWORD: return parseCommitAction(); case TRANSACTIONAL_KEYWORD: return parseTransactionalExpression(); case SERVICE_KEYWORD: return parseServiceConstructorExpression(annots); case BASE16_KEYWORD: case BASE64_KEYWORD: return parseByteArrayLiteral(); default: if (isSimpleType(nextToken.kind)) { return parseSimpleTypeDescriptor(); } break; } Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, annots, isRhsExpr, allowActions, isInConditionalExpr); if (solution.action == Action.KEEP) { if (nextToken.kind == SyntaxKind.XML_KEYWORD) { return parseXMLTemplateExpression(); } return parseStringTemplateExpression(); } return parseTerminalExpression(annots, isRhsExpr, allowActions, isInConditionalExpr); } private boolean isValidExprStart(SyntaxKind tokenKind) { switch (tokenKind) { case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case IDENTIFIER_TOKEN: case OPEN_PAREN_TOKEN: case CHECK_KEYWORD: case CHECKPANIC_KEYWORD: case 
OPEN_BRACE_TOKEN: case TYPEOF_KEYWORD: case PLUS_TOKEN: case MINUS_TOKEN: case NEGATION_TOKEN: case EXCLAMATION_MARK_TOKEN: case TRAP_KEYWORD: case OPEN_BRACKET_TOKEN: case LT_TOKEN: case TABLE_KEYWORD: case STREAM_KEYWORD: case FROM_KEYWORD: case ERROR_KEYWORD: case LET_KEYWORD: case BACKTICK_TOKEN: case XML_KEYWORD: case STRING_KEYWORD: case FUNCTION_KEYWORD: case AT_TOKEN: case NEW_KEYWORD: case START_KEYWORD: case FLUSH_KEYWORD: case LEFT_ARROW_TOKEN: case WAIT_KEYWORD: case SERVICE_KEYWORD: return true; default: return isSimpleType(tokenKind); } } /** * <p> * Parse a new expression. * </p> * <code> * new-expr := explicit-new-expr | implicit-new-expr * <br/> * explicit-new-expr := new type-descriptor ( arg-list ) * <br/> * implicit-new-expr := new [( arg-list )] * </code> * * @return Parsed NewExpression node. */ private STNode parseNewExpression() { STNode newKeyword = parseNewKeyword(); return parseNewKeywordRhs(newKeyword); } /** * <p> * Parse `new` keyword. * </p> * * @return Parsed NEW_KEYWORD Token. */ private STNode parseNewKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.NEW_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.NEW_KEYWORD); return parseNewKeyword(); } } private STNode parseNewKeywordRhs(STNode newKeyword) { STNode token = peek(); return parseNewKeywordRhs(token.kind, newKeyword); } /** * <p> * Parse an implicit or explicit new expression. * </p> * * @param kind next token kind. * @param newKeyword parsed node for `new` keyword. * @return Parsed new-expression node. 
*/
private STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {
    switch (kind) {
        case OPEN_PAREN_TOKEN:
            // `new ( arg-list )` -- implicit-new with an explicit argument list.
            return parseImplicitNewRhs(newKeyword);
        case SEMICOLON_TOKEN:
            // Bare `new;` -- fall through to the no-arg implicit-new below.
            break;
        case IDENTIFIER_TOKEN:
        case OBJECT_KEYWORD:
        case STREAM_KEYWORD:
            // A type descriptor follows: this is an explicit-new-expr.
            return parseTypeDescriptorInNewExpr(newKeyword);
        default:
            break;
    }

    // Implicit-new without a parenthesized argument list.
    return STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());
}

/**
 * <p>
 * Parse an Explicit New expression.
 * </p>
 * <code>
 * explicit-new-expr := new type-descriptor ( arg-list )
 * </code>
 *
 * @param newKeyword Parsed `new` keyword.
 * @return the Parsed Explicit New Expression.
 */
private STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);
    STNode parenthesizedArgsList = parseParenthesizedArgList();
    return STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);
}

/**
 * <p>
 * Parse an <code>implicit-new-expr</code> with arguments.
 * </p>
 *
 * @param newKeyword Parsed `new` keyword.
 * @return Parsed implicit-new-expr.
 */
private STNode parseImplicitNewRhs(STNode newKeyword) {
    STNode implicitNewArgList = parseParenthesizedArgList();
    return STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);
}

/**
 * <p>
 * Parse the parenthesized argument list for a <code>new-expr</code>.
 * </p>
 *
 * @return Parsed parenthesized rhs of <code>new-expr</code>.
 */
private STNode parseParenthesizedArgList() {
    STNode openParan = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode arguments = parseArgsList();
    STNode closeParan = parseCloseParenthesis();
    return STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan);
}

/** * <p> * Parse the right-hand-side of an expression.
* </p> * <code>expr-rhs := (binary-op expression * | dot identifier * | open-bracket expression close-bracket * )*</code> * * @param precedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression of the expression * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Flag indicating whether the current context support actions * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions) { return parseExpressionRhs(precedenceLevel, lhsExpr, isRhsExpr, allowActions, false, false); } /** * Parse the right hand side of an expression given the next token kind. * * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently * @param lhsExpr LHS expression * @param isRhsExpr Flag indicating whether this is a rhs expr or not * @param allowActions Flag indicating whether to allow actions or not * @param isInMatchGuard Flag indicating whether this expression is in a match-guard * @return Parsed node */ private STNode parseExpressionRhs(OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { STNode actionOrExpression = parseExpressionRhsInternal(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); if (!allowActions && isAction(actionOrExpression) && actionOrExpression.kind != SyntaxKind.BRACED_ACTION) { actionOrExpression = attachErrorExpectedActionFoundDiagnostic(actionOrExpression); } return actionOrExpression; } private STNode parseExpressionRhsInternal(OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { SyntaxKind nextTokenKind = peek().kind; if (isEndOfExpression(nextTokenKind, isRhsExpr, isInMatchGuard, 
lhsExpr.kind)) { return lhsExpr; } if (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) { return lhsExpr; } if (!isValidExprRhsStart(nextTokenKind, lhsExpr.kind)) { return recoverExpressionRhs(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } if (nextTokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) { if (peek(3).kind == SyntaxKind.GT_TOKEN) { nextTokenKind = SyntaxKind.TRIPPLE_GT_TOKEN; } else { nextTokenKind = SyntaxKind.DOUBLE_GT_TOKEN; } } OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(nextTokenKind); if (currentPrecedenceLevel.isHigherThanOrEqual(nextOperatorPrecedence, allowActions)) { return lhsExpr; } STNode newLhsExpr; STNode operator; switch (nextTokenKind) { case OPEN_PAREN_TOKEN: newLhsExpr = parseFuncCall(lhsExpr); break; case OPEN_BRACKET_TOKEN: newLhsExpr = parseMemberAccessExpr(lhsExpr, isRhsExpr); break; case DOT_TOKEN: newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr, isInConditionalExpr); break; case IS_KEYWORD: newLhsExpr = parseTypeTestExpression(lhsExpr, isInConditionalExpr); break; case RIGHT_ARROW_TOKEN: newLhsExpr = parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr); break; case SYNC_SEND_TOKEN: newLhsExpr = parseSyncSendAction(lhsExpr); break; case RIGHT_DOUBLE_ARROW_TOKEN: newLhsExpr = parseImplicitAnonFunc(lhsExpr, isRhsExpr); break; case ANNOT_CHAINING_TOKEN: newLhsExpr = parseAnnotAccessExpression(lhsExpr, isInConditionalExpr); break; case OPTIONAL_CHAINING_TOKEN: newLhsExpr = parseOptionalFieldAccessExpression(lhsExpr, isInConditionalExpr); break; case QUESTION_MARK_TOKEN: newLhsExpr = parseConditionalExpression(lhsExpr); break; case DOT_LT_TOKEN: newLhsExpr = parseXMLFilterExpression(lhsExpr); break; case SLASH_LT_TOKEN: case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: case SLASH_ASTERISK_TOKEN: newLhsExpr = parseXMLStepExpression(lhsExpr); break; default: if (nextTokenKind == SyntaxKind.SLASH_TOKEN && peek(2).kind == SyntaxKind.LT_TOKEN) { 
SyntaxKind expectedNodeType = getExpectedNodeKind(3, isRhsExpr, isInMatchGuard, lhsExpr.kind); if (expectedNodeType == SyntaxKind.XML_STEP_EXPRESSION) { newLhsExpr = createXMLStepExpression(lhsExpr); break; } } if (nextTokenKind == SyntaxKind.DOUBLE_GT_TOKEN) { operator = parseSignedRightShiftToken(); } else if (nextTokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) { operator = parseUnsignedRightShiftToken(); } else { operator = parseBinaryOperator(); } if (isAction(lhsExpr) && lhsExpr.kind != SyntaxKind.BRACED_ACTION) { lhsExpr = attachErrorExpectedActionFoundDiagnostic(lhsExpr); } STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false, isInConditionalExpr); newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator, rhsExpr); break; } return parseExpressionRhsInternal(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } private STNode recoverExpressionRhs(OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr, boolean isRhsExpr, boolean allowActions, boolean isInMatchGuard, boolean isInConditionalExpr) { STToken token = peek(); Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); if (solution.action == Action.REMOVE) { return parseExpressionRhs(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) { SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel); insertToken(binaryOpKind); return parseExpressionRhsInternal(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } else { return parseExpressionRhsInternal(currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions, isInMatchGuard, isInConditionalExpr); } } private STNode createXMLStepExpression(STNode lhsExpr) { STNode newLhsExpr; 
STNode slashToken = parseSlashToken(); STNode ltToken = parseLTToken(); STNode slashLT; if (hasTrailingMinutiae(slashToken) || hasLeadingMinutiae(ltToken)) { List<STNodeDiagnostic> diagnostics = new ArrayList<>(); diagnostics .add(SyntaxErrors.createDiagnostic(DiagnosticErrorCode.ERROR_INVALID_WHITESPACE_IN_SLASH_LT_TOKEN)); slashLT = STNodeFactory.createMissingToken(SyntaxKind.SLASH_LT_TOKEN, diagnostics); slashLT = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(slashLT, slashToken); slashLT = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(slashLT, ltToken); } else { slashLT = STNodeFactory.createToken(SyntaxKind.SLASH_LT_TOKEN, slashToken.leadingMinutiae(), ltToken.trailingMinutiae()); } STNode namePattern = parseXMLNamePatternChain(slashLT); newLhsExpr = STNodeFactory.createXMLStepExpressionNode(lhsExpr, namePattern); return newLhsExpr; } private SyntaxKind getExpectedNodeKind(int lookahead, boolean isRhsExpr, boolean isInMatchGuard, SyntaxKind precedingNodeKind) { STToken nextToken = peek(lookahead); switch (nextToken.kind) { case ASTERISK_TOKEN: return SyntaxKind.XML_STEP_EXPRESSION; case GT_TOKEN: break; case PIPE_TOKEN: return getExpectedNodeKind(++lookahead, isRhsExpr, isInMatchGuard, precedingNodeKind); case IDENTIFIER_TOKEN: nextToken = peek(++lookahead); switch (nextToken.kind) { case GT_TOKEN: break; case PIPE_TOKEN: return getExpectedNodeKind(++lookahead, isRhsExpr, isInMatchGuard, precedingNodeKind); case COLON_TOKEN: nextToken = peek(++lookahead); switch (nextToken.kind) { case ASTERISK_TOKEN: case GT_TOKEN: return SyntaxKind.XML_STEP_EXPRESSION; case IDENTIFIER_TOKEN: nextToken = peek(++lookahead); if (nextToken.kind == SyntaxKind.PIPE_TOKEN) { return getExpectedNodeKind(++lookahead, isRhsExpr, isInMatchGuard, precedingNodeKind); } break; default: return SyntaxKind.TYPE_CAST_EXPRESSION; } break; default: return SyntaxKind.TYPE_CAST_EXPRESSION; } break; default: return SyntaxKind.TYPE_CAST_EXPRESSION; } nextToken = peek(++lookahead); switch 
(nextToken.kind) { case OPEN_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case PLUS_TOKEN: case MINUS_TOKEN: case FROM_KEYWORD: case LET_KEYWORD: return SyntaxKind.XML_STEP_EXPRESSION; default: if (isValidExpressionStart(nextToken.kind, lookahead)) { break; } return SyntaxKind.XML_STEP_EXPRESSION; } return SyntaxKind.TYPE_CAST_EXPRESSION; } private boolean hasTrailingMinutiae(STNode node) { return node.widthWithTrailingMinutiae() > node.width(); } private boolean hasLeadingMinutiae(STNode node) { return node.widthWithLeadingMinutiae() > node.width(); } private boolean isValidExprRhsStart(SyntaxKind tokenKind, SyntaxKind precedingNodeKind) { switch (tokenKind) { case OPEN_PAREN_TOKEN: return precedingNodeKind == SyntaxKind.QUALIFIED_NAME_REFERENCE || precedingNodeKind == SyntaxKind.SIMPLE_NAME_REFERENCE; case DOT_TOKEN: case OPEN_BRACKET_TOKEN: case IS_KEYWORD: case RIGHT_ARROW_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: case SYNC_SEND_TOKEN: case ANNOT_CHAINING_TOKEN: case OPTIONAL_CHAINING_TOKEN: case QUESTION_MARK_TOKEN: case COLON_TOKEN: case DOT_LT_TOKEN: case SLASH_LT_TOKEN: case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: case SLASH_ASTERISK_TOKEN: return true; default: return isBinaryOperator(tokenKind); } } /** * Parse member access expression. 
 * * @param lhsExpr Container expression * @param isRhsExpr Is this is a rhs expression * @return Member access expression */
private STNode parseMemberAccessExpr(STNode lhsExpr, boolean isRhsExpr) {
    startContext(ParserRuleContext.MEMBER_ACCESS_KEY_EXPR);
    STNode openBracket = parseOpenBracket();
    STNode keyExpr = parseMemberAccessKeyExprs(isRhsExpr);
    STNode closeBracket = parseCloseBracket();
    endContext();

    // In an expression (rhs) context an empty key list (`expr[]`) is invalid:
    // insert a missing identifier as the key and attach a diagnostic to the
    // close bracket. In the non-rhs path empty brackets are left as-is --
    // presumably the `T[]` array-type suffix case; see the key-expression
    // docs that follow this method.
    if (isRhsExpr && ((STNodeList) keyExpr).isEmpty()) {
        STNode missingVarRef = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        keyExpr = STNodeFactory.createNodeList(missingVarRef);
        closeBracket = SyntaxErrors.addDiagnostic(closeBracket,
                DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    }
    return STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);
}

/** * Parse key expression of a member access expression. A type descriptor * that starts with a type-ref (e.g: T[a][b]) also goes through this * method.
 * <p> * <code>key-expression := single-key-expression | multi-key-expression</code> * * @param isRhsExpr Is this is a rhs expression * @return Key expression */
private STNode parseMemberAccessKeyExprs(boolean isRhsExpr) {
    List<STNode> exprList = new ArrayList<>();

    // Parse the comma-separated list of key expressions. The loop also exits
    // when parseMemberAccessKeyExprEnd() sees the closing bracket.
    STNode keyExpr;
    STNode keyExprEnd;
    while (!isEndOfTypeList(peek().kind)) {
        keyExpr = parseKeyExpr(isRhsExpr);
        exprList.add(keyExpr);
        keyExprEnd = parseMemberAccessKeyExprEnd();
        if (keyExprEnd == null) {
            break;
        }
        exprList.add(keyExprEnd);
    }

    return STNodeFactory.createNodeList(exprList);
}

/**
 * Parse a single key expression.
 * <p>
 * In a non-rhs (type descriptor) context a <code>*</code> is consumed as an
 * asterisk-literal -- presumably the inferred-length array form
 * <code>T[*]</code>; otherwise a regular expression is parsed.
 *
 * @param isRhsExpr Is this a rhs expression
 * @return Parsed key expression
 */
private STNode parseKeyExpr(boolean isRhsExpr) {
    if (!isRhsExpr && peek().kind == SyntaxKind.ASTERISK_TOKEN) {
        return STNodeFactory.createBasicLiteralNode(SyntaxKind.ASTERISK_LITERAL, consume());
    }
    return parseExpression(isRhsExpr);
}

/**
 * Parse the separator after a key expression: a comma continues the list;
 * the close bracket ends it, signalled by returning <code>null</code>.
 * Any other token triggers recovery and a retry.
 *
 * @return Comma token, or <code>null</code> at the end of the list
 */
private STNode parseMemberAccessKeyExprEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.MEMBER_ACCESS_KEY_EXPR_END);
            return parseMemberAccessKeyExprEnd();
    }
}

/**
 * Parse close bracket.
 *
 * @return Parsed node
 */
private STNode parseCloseBracket() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CLOSE_BRACKET);
        return parseCloseBracket();
    }
}

/** * Parse field access, xml required attribute access expressions or method call expression. * <p> * <code> * field-access-expr := expression . field-name * <br/> * xml-required-attribute-access-expr := expression . xml-attribute-name * <br/> * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier * <br/> * method-call-expr := expression . method-name ( arg-list ) * </code> * * @param lhsExpr Preceding expression of the field access or method call * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>.
*/
private STNode parseFieldAccessOrMethodCall(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode dotToken = parseDotToken();
    STToken token = peek();
    if (token.kind == SyntaxKind.MAP_KEYWORD || token.kind == SyntaxKind.START_KEYWORD) {
        // `map` and `start` are keywords, yet still allowed as method names
        // after a dot (e.g. `expr.map(...)`): repackage the keyword token as a
        // simple name reference and parse the call.
        STNode methodName = getKeywordAsSimpleNameRef();
        STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, methodName, openParen, args,
                closeParen);
    }

    STNode fieldOrMethodName = parseFieldAccessIdentifier(isInConditionalExpr);
    if (fieldOrMethodName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name after the dot is always treated as field access
        // (covers the xml-attribute-name form), never as a method call.
        return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
    }

    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // An argument list follows the name: this is a method call.
        STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
        STNode args = parseArgsList();
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen,
                args, closeParen);
    }

    // Otherwise it is a plain field access.
    return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
}

/**
 * Consume the current keyword token and repackage it as a simple name
 * reference, preserving the token's text, minutiae and diagnostics.
 *
 * @return Simple name reference wrapping the keyword's text
 */
private STNode getKeywordAsSimpleNameRef() {
    STToken mapKeyword = consume();
    STNode methodName = STNodeFactory.createIdentifierToken(mapKeyword.text(), mapKeyword.leadingMinutiae(),
            mapKeyword.trailingMinutiae(), mapKeyword.diagnostics());
    methodName = STNodeFactory.createSimpleNameReferenceNode(methodName);
    return methodName;
}

/** * <p> * Parse braced expression.
* </p> * <code>braced-expr := ( expression )</code> * * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @param allowActions Allow actions * @return Parsed node */ private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) { STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); if (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) { return parseNilLiteralOrEmptyAnonFuncParamRhs(openParen); } startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS); STNode expr; if (allowActions) { expr = parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true); } else { expr = parseExpression(isRhsExpr); } return parseBracedExprOrAnonFuncParamRhs(openParen, expr, isRhsExpr); } private STNode parseNilLiteralOrEmptyAnonFuncParamRhs(STNode openParen) { STNode closeParen = parseCloseParenthesis(); STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) { return STNodeFactory.createNilLiteralNode(openParen, closeParen); } else { STNode params = STNodeFactory.createEmptyNodeList(); STNode anonFuncParam = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return anonFuncParam; } } private STNode parseBracedExprOrAnonFuncParamRhs(STNode openParen, STNode expr, boolean isRhsExpr) { STToken nextToken = peek(); if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) { switch (nextToken.kind) { case CLOSE_PAREN_TOKEN: break; case COMMA_TOKEN: return parseImplicitAnonFunc(openParen, expr, isRhsExpr); default: recover(nextToken, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS, openParen, expr, isRhsExpr); return parseBracedExprOrAnonFuncParamRhs(openParen, expr, isRhsExpr); } } STNode closeParen = parseCloseParenthesis(); endContext(); if (isAction(expr)) { return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen); } return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, 
closeParen); } /** * Check whether a given node is an action node. * * @param node Node to check * @return <code>true</code> if the node is an action node. <code>false</code> otherwise */ private boolean isAction(STNode node) { switch (node.kind) { case REMOTE_METHOD_CALL_ACTION: case BRACED_ACTION: case CHECK_ACTION: case START_ACTION: case TRAP_ACTION: case FLUSH_ACTION: case ASYNC_SEND_ACTION: case SYNC_SEND_ACTION: case RECEIVE_ACTION: case WAIT_ACTION: case QUERY_ACTION: case COMMIT_ACTION: return true; default: return false; } } /** * Check whether the given token is an end of a expression. * * @param tokenKind Token to check * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise */ private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr, boolean isInMatchGuard, SyntaxKind precedingNodeKind) { if (!isRhsExpr) { if (isCompoundBinaryOperator(tokenKind)) { return true; } if (isInMatchGuard && tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) { return true; } return !isValidExprRhsStart(tokenKind, precedingNodeKind); } switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case OPEN_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case COMMA_TOKEN: case PUBLIC_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case RESOURCE_KEYWORD: case EQUAL_TOKEN: case DOCUMENTATION_STRING: case AT_TOKEN: case AS_KEYWORD: case IN_KEYWORD: case FROM_KEYWORD: case WHERE_KEYWORD: case LET_KEYWORD: case SELECT_KEYWORD: case DO_KEYWORD: case COLON_TOKEN: case ON_KEYWORD: case CONFLICT_KEYWORD: case LIMIT_KEYWORD: case JOIN_KEYWORD: case OUTER_KEYWORD: case ORDER_KEYWORD: case BY_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return true; case RIGHT_DOUBLE_ARROW_TOKEN: return isInMatchGuard; default: return isSimpleType(tokenKind); } } /** * Parse basic literals. 
It is assumed that we come here after validation. * * @return Parsed node */
private STNode parseBasicLiteral() {
    STNode literalToken = consume();
    return parseBasicLiteral(literalToken);
}

/**
 * Wrap an already-consumed literal token in the corresponding basic-literal
 * node, mapping the token kind to the literal node kind (e.g. all integer and
 * floating-point tokens become <code>NUMERIC_LITERAL</code>).
 *
 * @param literalToken Consumed literal token
 * @return Basic literal node
 */
private STNode parseBasicLiteral(STNode literalToken) {
    SyntaxKind nodeKind;
    switch (literalToken.kind) {
        case NULL_KEYWORD:
            nodeKind = SyntaxKind.NULL_LITERAL;
            break;
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
            nodeKind = SyntaxKind.BOOLEAN_LITERAL;
            break;
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            nodeKind = SyntaxKind.NUMERIC_LITERAL;
            break;
        case STRING_LITERAL_TOKEN:
            nodeKind = SyntaxKind.STRING_LITERAL;
            break;
        case ASTERISK_TOKEN:
            nodeKind = SyntaxKind.ASTERISK_LITERAL;
            break;
        default:
            // Any other token keeps its own kind for the literal node.
            nodeKind = literalToken.kind;
    }
    return STNodeFactory.createBasicLiteralNode(nodeKind, literalToken);
}

/**
 * Parse function call expression.
 * <code>function-call-expr := function-reference ( arg-list )
 * function-reference := variable-reference</code>
 *
 * @param identifier Function name
 * @return Function call expression
 */
private STNode parseFuncCall(STNode identifier) {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode args = parseArgsList();
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);
}

/**
 * <p>
 * Parse error constructor expression.
 * </p>
 * <code>
 * error-constructor-expr := error ( arg-list )
 * </code>
 *
 * @return Error constructor expression
 */
private STNode parseErrorConstructorExpr() {
    STNode errorKeyword = parseErrorKeyword();
    // The `error` keyword acts as the function reference of the call.
    errorKeyword = createBuiltinSimpleNameReference(errorKeyword);
    return parseFuncCall(errorKeyword);
}

/** * Parse function call argument list.
* * @return Parsed args list */ private STNode parseArgsList() { startContext(ParserRuleContext.ARG_LIST); STToken token = peek(); if (isEndOfParametersList(token.kind)) { STNode args = STNodeFactory.createEmptyNodeList(); endContext(); return args; } STNode firstArg = parseArgument(); STNode argsList = parseArgList(firstArg); endContext(); return argsList; } /** * Parse follow up arguments. * * @param firstArg first argument in the list * @return the argument list */ private STNode parseArgList(STNode firstArg) { ArrayList<STNode> argsList = new ArrayList<>(); argsList.add(firstArg); SyntaxKind lastValidArgKind = firstArg.kind; STToken nextToken = peek(); while (!isEndOfParametersList(nextToken.kind)) { STNode argEnd = parseArgEnd(); if (argEnd == null) { break; } STNode curArg = parseArgument(); DiagnosticErrorCode errorCode = validateArgumentOrder(lastValidArgKind, curArg.kind); if (errorCode == null) { argsList.add(argEnd); argsList.add(curArg); lastValidArgKind = curArg.kind; } else if (errorCode == DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG && isMissingPositionalArg(curArg)) { argsList.add(argEnd); argsList.add(curArg); } else { updateLastNodeInListWithInvalidNode(argsList, argEnd, null); updateLastNodeInListWithInvalidNode(argsList, curArg, errorCode); } nextToken = peek(); } return STNodeFactory.createNodeList(argsList); } private DiagnosticErrorCode validateArgumentOrder(SyntaxKind prevArgKind, SyntaxKind curArgKind) { DiagnosticErrorCode errorCode = null; switch (prevArgKind) { case POSITIONAL_ARG: break; case NAMED_ARG: if (curArgKind == SyntaxKind.POSITIONAL_ARG) { errorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG; } break; case REST_ARG: errorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG; break; default: throw new IllegalStateException("Invalid SyntaxKind in an argument"); } return errorCode; } private boolean isMissingPositionalArg(STNode arg) { STNode expr = ((STPositionalArgumentNode) 
arg).expression; return expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE && ((STSimpleNameReferenceNode) expr).name.isMissing(); } private STNode parseArgEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), ParserRuleContext.ARG_END); return parseArgEnd(); } } /** * Parse function call argument. * * @return Parsed argument node */ private STNode parseArgument() { STNode arg; STToken nextToken = peek(); switch (nextToken.kind) { case ELLIPSIS_TOKEN: STToken ellipsis = consume(); STNode expr = parseExpression(); arg = STNodeFactory.createRestArgumentNode(ellipsis, expr); break; case IDENTIFIER_TOKEN: arg = parseNamedOrPositionalArg(); break; default: if (isValidExprStart(nextToken.kind)) { expr = parseExpression(); arg = STNodeFactory.createPositionalArgumentNode(expr); break; } recover(peek(), ParserRuleContext.ARG_START); return parseArgument(); } return arg; } /** * Parse positional or named arg. This method assumed peek()/peek(1) * is always an identifier. * * @return Parsed argument node */ private STNode parseNamedOrPositionalArg() { STNode argNameOrExpr = parseTerminalExpression(true, false, false); STToken secondToken = peek(); switch (secondToken.kind) { case EQUAL_TOKEN: STNode equal = parseAssignOp(); STNode valExpr = parseExpression(); return STNodeFactory.createNamedArgumentNode(argNameOrExpr, equal, valExpr); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: return STNodeFactory.createPositionalArgumentNode(argNameOrExpr); default: argNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, true, false); return STNodeFactory.createPositionalArgumentNode(argNameOrExpr); } } /** * Parse object type descriptor. 
 *
 * @return Parsed node
 */
private STNode parseObjectTypeDescriptor() {
    startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);
    STNode objectTypeQualifiers = parseObjectTypeQualifiers();
    STNode objectKeyword = parseObjectKeyword();
    STNode openBrace = parseOpenBrace();
    STNode objectMembers = parseObjectMembers();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,
            objectMembers, closeBrace);
}

/**
 * Parse object type qualifiers.
 *
 * @return Parsed node
 */
private STNode parseObjectTypeQualifiers() {
    STNode firstQualifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case CLIENT_KEYWORD:
            firstQualifier = parseClientKeyword();
            break;
        case ABSTRACT_KEYWORD:
            firstQualifier = parseAbstractKeyword();
            break;
        case READONLY_KEYWORD:
            firstQualifier = parseReadonlyKeyword();
            break;
        case OBJECT_KEYWORD:
            // No qualifiers present
            return STNodeFactory.createEmptyNodeList();
        default:
            recover(nextToken, ParserRuleContext.OBJECT_TYPE_QUALIFIER);
            return parseObjectTypeQualifiers();
    }
    return parseObjectTypeNextQualifiers(firstQualifier);
}

// Parses up to two additional qualifiers after the first one. A repeated
// qualifier is consumed and attached to the previous node as invalid minutiae.
private STNode parseObjectTypeNextQualifiers(STNode firstQualifier) {
    List<STNode> qualifiers = new ArrayList<>();
    qualifiers.add(firstQualifier);
    for (int i = 0; i < 2; i++) {
        STNode nextToken = peek();
        if (isNodeWithSyntaxKindInList(qualifiers, nextToken.kind)) {
            // Duplicate qualifier
            nextToken = consume();
            updateLastNodeInListWithInvalidNode(qualifiers, nextToken,
                    DiagnosticErrorCode.ERROR_SAME_OBJECT_TYPE_QUALIFIER);
            continue;
        }
        STNode nextQualifier;
        switch (nextToken.kind) {
            case CLIENT_KEYWORD:
                nextQualifier = parseClientKeyword();
                break;
            case ABSTRACT_KEYWORD:
                nextQualifier = parseAbstractKeyword();
                break;
            case READONLY_KEYWORD:
                nextQualifier = parseReadonlyKeyword();
                break;
            case OBJECT_KEYWORD:
            default:
                return STNodeFactory.createNodeList(qualifiers);
        }
        qualifiers.add(nextQualifier);
    }
    return STNodeFactory.createNodeList(qualifiers);
}

/**
 * Parse client keyword.
 *
 * @return Parsed node
 */
private STNode parseClientKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CLIENT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CLIENT_KEYWORD);
        return parseClientKeyword();
    }
}

/**
 * Parse abstract keyword.
 *
 * @return Parsed node
 */
private STNode parseAbstractKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ABSTRACT_KEYWORD);
        return parseAbstractKeyword();
    }
}

/**
 * Parse object keyword.
 *
 * @return Parsed node
 */
private STNode parseObjectKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OBJECT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OBJECT_KEYWORD);
        return parseObjectKeyword();
    }
}

/**
 * Parse object members.
 *
 * @return Parsed node
 */
private STNode parseObjectMembers() {
    ArrayList<STNode> objectMembers = new ArrayList<>();
    while (!isEndOfObjectTypeNode()) {
        startContext(ParserRuleContext.OBJECT_MEMBER);
        STNode member = parseObjectMember();
        endContext();
        if (member == null) {
            break;
        }
        objectMembers.add(member);
    }
    return STNodeFactory.createNodeList(objectMembers);
}

// Parses a single object member, starting with its optional metadata.
private STNode parseObjectMember() {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // null marks the end of object members
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            metadata = parseMetaData();
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);
            return parseObjectMember();
    }
    return parseObjectMemberWithoutMeta(metadata);
}

// Parses the member that follows the (already parsed) metadata.
private STNode parseObjectMemberWithoutMeta(STNode metadata) {
    STNode member;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return null;
        case ASTERISK_TOKEN:
            // Type reference member: *TypeName;
            STNode asterisk = consume();
            STNode type = parseTypeReference();
            STNode semicolonToken = parseSemicolon();
            member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
            break;
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
            STNode visibilityQualifier = parseObjectMemberVisibility();
            member = parseObjectMethodOrField(metadata, visibilityQualifier);
            break;
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            member = parseObjectMethod(metadata, new ArrayList<>());
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                member = parseObjectField(metadata, STNodeFactory.createEmptyNode());
                break;
            }
            recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA, metadata);
            return parseObjectMemberWithoutMeta(metadata);
    }
    return member;
}

/**
 * Parse object visibility. Visibility can be <code>public</code> or <code>private</code>.
 *
 * @return Parsed node
 */
private STNode parseObjectMemberVisibility() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OBJECT_MEMBER_QUALIFIER);
        return parseObjectMemberVisibility();
    }
}

/**
 * Parse an object member, given the visibility modifier. Object member can have
 * only one visibility qualifier. This mean the methodQualifiers list can have
 * one qualifier at-most.
 *
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @return Parse object member node
 */
private STNode parseObjectMethodOrField(STNode metadata, STNode visibilityQualifier) {
    STToken nextToken = peek(1);
    List<STNode> qualifiers = new ArrayList<>();
    switch (nextToken.kind) {
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            // Method definition: carry the visibility qualifier (if any) forward
            if (visibilityQualifier != null) {
                qualifiers.add(visibilityQualifier);
            }
            return parseObjectMethod(metadata, qualifiers);
        case IDENTIFIER_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind != SyntaxKind.OPEN_PAREN_TOKEN) {
                // Identifier not followed by "(" — field with a user-defined type
                return parseObjectField(metadata, visibilityQualifier);
            }
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                return parseObjectField(metadata, visibilityQualifier);
            }
            break;
    }
    recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata, visibilityQualifier);
    return parseObjectMethodOrField(metadata, visibilityQualifier);
}

/**
 * Parse function qualifiers.
 *
 * @return Parsed node
 */
private STNode parseFunctionQualifiers(ParserRuleContext context, List<STNode> qualifierList) {
    STToken nextToken = peek();
    while (!isEndOfFunctionQualifiers(nextToken.kind)) {
        STNode qualifier;
        switch (nextToken.kind) {
            case REMOTE_KEYWORD:
                qualifier = parseRemoteKeyword();
                break;
            case TRANSACTIONAL_KEYWORD:
                qualifier = parseTransactionalKeyword();
                break;
            case RESOURCE_KEYWORD:
                qualifier = parseResourceKeyword();
                break;
            default:
                recover(peek(), context, context, qualifierList);
                return parseFunctionQualifiers(context, qualifierList);
        }
        DiagnosticCode diagnosticCode = validateFunctionQualifier(qualifier, context, qualifierList);
        if (diagnosticCode != null) {
            // Invalid or duplicate qualifier: keep it as invalid-node minutiae
            if (qualifierList.size() == 0) {
                addInvalidNodeToNextToken(qualifier, diagnosticCode, qualifier.toString().trim());
            } else {
                updateLastNodeInListWithInvalidNode(qualifierList, qualifier, diagnosticCode);
            }
        } else {
            qualifierList.add(qualifier);
        }
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(qualifierList);
}

// The qualifier list ends at the function keyword (or EOF).
private boolean isEndOfFunctionQualifiers(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

// Validates a qualifier first against the parsing context, then against the
// qualifiers seen so far (duplicates). Returns null when valid.
private DiagnosticCode validateFunctionQualifier(STNode currentQualifier, ParserRuleContext context,
                                                 List<STNode> qualifierList) {
    switch (currentQualifier.kind) {
        case REMOTE_KEYWORD:
            if (context != ParserRuleContext.OBJECT_METHOD_START) {
                return DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED;
            }
            break;
        case TRANSACTIONAL_KEYWORD:
            // transactional is allowed in every qualifier context
            break;
        default:
            // resource qualifier: only allowed in resource definitions
            if (context != ParserRuleContext.RESOURCE_DEF_QUALIFIERS) {
                return DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED;
            }
    }
    return validateFunctionQualifier(currentQualifier, qualifierList);
}

// Returns a duplicate-qualifier error when the qualifier kind already occurs
// in the list; null otherwise.
private DiagnosticCode validateFunctionQualifier(STNode currentQualifier, List<STNode> qualifierList) {
    for (STNode node : qualifierList) {
        if (node.kind == currentQualifier.kind) {
            return DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER;
        }
    }
    return null;
}

// Parses the remote keyword, recovering if absent.
private STNode parseRemoteKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.REMOTE_KEYWORD);
        return parseRemoteKeyword();
    }
}

// Parses an object field: [readonly] type-descriptor field-name [= expr] ;
// The leading "readonly" is ambiguous: it may be the field's readonly
// qualifier, or the "readonly" type itself.
private STNode parseObjectField(STNode metadata, STNode methodQualifiers) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {
        // Non-readonly case: type-desc then field name
        STNode readonlyQualifier = STNodeFactory.createEmptyNode();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode fieldName = parseVariableName();
        return parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);
    }
    STNode type;
    STNode readonlyQualifier = parseReadonlyKeyword();
    nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldNameOrTypeDesc = parseQualifiedIdentifier(ParserRuleContext.RECORD_FIELD_NAME_OR_TYPE_NAME);
        if (fieldNameOrTypeDesc.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // readonly a:b ... — the qualified name must be the field's type
            type = fieldNameOrTypeDesc;
        } else {
            nextToken = peek();
            switch (nextToken.kind) {
                case SEMICOLON_TOKEN:
                case EQUAL_TOKEN:
                    // "readonly name;" / "readonly name = ..." — readonly is
                    // the type and the identifier is the field name
                    type = createBuiltinSimpleNameReference(readonlyQualifier);
                    readonlyQualifier = STNodeFactory.createEmptyNode();
                    STNode fieldName = ((STSimpleNameReferenceNode) fieldNameOrTypeDesc).name;
                    return parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);
                default:
                    // The identifier starts a (possibly complex) type descriptor
                    type = parseComplexTypeDescriptor(fieldNameOrTypeDesc,
                            ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);
                    break;
            }
        }
    } else if (isTypeStartingToken(nextToken.kind)) {
        type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
    } else {
        // "readonly" itself is (the start of) the type descriptor
        readonlyQualifier = createBuiltinSimpleNameReference(readonlyQualifier);
        type = parseComplexTypeDescriptor(readonlyQualifier, ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD, false);
        readonlyQualifier = STNodeFactory.createEmptyNode();
    }
    STNode fieldName = parseVariableName();
    return parseObjectFieldRhs(metadata, methodQualifiers, readonlyQualifier, type, fieldName);
}

/**
 * Parse object
field rhs, and complete the object field parsing. Returns the parsed object field.
 *
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @param readonlyQualifier Readonly qualifier
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed object field
 */
private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode readonlyQualifier,
                                   STNode type, STNode fieldName) {
    STToken nextToken = peek();
    STNode equalsToken;
    STNode expression;
    STNode semicolonToken;
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // Field without a default value
            equalsToken = STNodeFactory.createEmptyNode();
            expression = STNodeFactory.createEmptyNode();
            semicolonToken = parseSemicolon();
            break;
        case EQUAL_TOKEN:
            // Field with a default value
            equalsToken = parseAssignOp();
            expression = parseExpression();
            semicolonToken = parseSemicolon();
            break;
        default:
            recover(peek(), ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier, readonlyQualifier,
                    type, fieldName);
            return parseObjectFieldRhs(metadata, visibilityQualifier, readonlyQualifier, type, fieldName);
    }
    return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, readonlyQualifier, type, fieldName,
            equalsToken, expression, semicolonToken);
}

// Parses an object method definition, delegating to the shared
// function-definition parser.
private STNode parseObjectMethod(STNode metadata, List<STNode> qualifiers) {
    return parseFuncDefOrFuncTypeDesc(ParserRuleContext.OBJECT_METHOD_START, metadata, true, qualifiers);
}

/**
 * Parse if-else statement.
 * <code>
 * if-else-stmt := if expression block-stmt [else-block]
 * </code>
 *
 * @return If-else block
 */
private STNode parseIfElseBlock() {
    startContext(ParserRuleContext.IF_BLOCK);
    STNode ifKeyword = parseIfKeyword();
    STNode condition = parseExpression();
    STNode ifBody = parseBlockNode();
    endContext();
    STNode elseBody = parseElseBlock();
    return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);
}

/**
 * Parse if-keyword.
* * @return Parsed if-keyword node */ private STNode parseIfKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.IF_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.IF_KEYWORD); return parseIfKeyword(); } } /** * Parse else-keyword. * * @return Parsed else keyword node */ private STNode parseElseKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ELSE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ELSE_KEYWORD); return parseElseKeyword(); } } /** * Parse block node. * <code> * block-stmt := { sequence-stmt } * sequence-stmt := statement* * </code> * * @return Parse block node */ private STNode parseBlockNode() { startContext(ParserRuleContext.BLOCK_STMT); STNode openBrace = parseOpenBrace(); STNode stmts = parseStatements(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace); } /** * Parse else block. * <code>else-block := else (if-else-stmt | block-stmt)</code> * * @return Else block */ private STNode parseElseBlock() { STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode elseKeyword = parseElseKeyword(); STNode elseBody = parseElseBody(); return STNodeFactory.createElseBlockNode(elseKeyword, elseBody); } /** * Parse else node body. * <code>else-body := if-else-stmt | block-stmt</code> * * @return Else node body */ private STNode parseElseBody() { STToken nextToken = peek(); switch (nextToken.kind) { case IF_KEYWORD: return parseIfElseBlock(); case OPEN_BRACE_TOKEN: return parseBlockNode(); default: recover(peek(), ParserRuleContext.ELSE_BODY); return parseElseBody(); } } /** * Parse do statement. 
 * <code>do-stmt := do block-stmt</code>
 *
 * @return Do statement
 */
private STNode parseDoStatement() {
    startContext(ParserRuleContext.DO_BLOCK);
    STNode doKeyword = parseDoKeyword();
    STNode doBody = parseBlockNode();
    endContext();
    STNode onFailClause;
    if (peek().kind == SyntaxKind.ON_KEYWORD) {
        // Optional on-fail clause
        onFailClause = parseOnFailClause();
    } else {
        onFailClause = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createDoStatementNode(doKeyword, doBody, onFailClause);
}

/**
 * Parse while statement.
 * <code>while-stmt := while expression block-stmt</code>
 *
 * @return While statement
 */
// NOTE(review): dangling javadoc above — no parseWhileStatement method follows
// it here; presumably the while-statement parser is defined elsewhere in this
// file. Confirm and relocate or remove this comment.
/**
 * Parse while-keyword.
 *
 * @return While-keyword node
 */
private STNode parseWhileKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.WHILE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.WHILE_KEYWORD);
        return parseWhileKeyword();
    }
}

/**
 * Parse panic statement.
 * <code>panic-stmt := panic expression ;</code>
 *
 * @return Panic statement
 */
private STNode parsePanicStatement() {
    startContext(ParserRuleContext.PANIC_STMT);
    STNode panicKeyword = parsePanicKeyword();
    STNode expression = parseExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);
}

/**
 * Parse panic-keyword.
 *
 * @return Panic-keyword node
 */
private STNode parsePanicKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PANIC_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.PANIC_KEYWORD);
        return parsePanicKeyword();
    }
}

/**
 * Parse check expression. This method is used to parse both check expression
 * as well as check action.
 *
 * <p>
 * <code>
 * checking-expr := checking-keyword expression
 * checking-action := checking-keyword action
 * </code>
 *
 * @param allowActions Allow actions
 * @param isRhsExpr Is rhs expression
 * @return Check expression node
 */
private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    STNode checkingKeyword = parseCheckingKeyword();
    STNode expr =
            parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
    if (isAction(expr)) {
        // check <action> is itself an action
        return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);
    } else {
        return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);
    }
}

/**
 * Parse checking keyword.
 * <p>
 * <code>
 * checking-keyword := check | checkpanic
 * </code>
 *
 * @return Parsed node
 */
private STNode parseCheckingKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CHECKING_KEYWORD);
        return parseCheckingKeyword();
    }
}

/**
 * Parse continue statement.
 * <code>continue-stmt := continue ; </code>
 *
 * @return continue statement
 */
private STNode parseContinueStatement() {
    startContext(ParserRuleContext.CONTINUE_STATEMENT);
    STNode continueKeyword = parseContinueKeyword();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);
}

/**
 * Parse continue-keyword.
 *
 * @return continue-keyword node
 */
private STNode parseContinueKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONTINUE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CONTINUE_KEYWORD);
        return parseContinueKeyword();
    }
}

/**
 * Parse fail statement.
* <code>fail-stmt := return [ action-or-expr ] ;</code> * * @return Fail statement */ private STNode parseFailStatement() { startContext(ParserRuleContext.FAIL_STATEMENT); STNode failKeyword = parseFailKeyword(); STNode expr = parseExpression(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createFailStatementNode(failKeyword, expr, semicolon); } /** * Parse fail keyword. * <p> * <code> * fail-keyword := fail * </code> * * @return Parsed node */ private STNode parseFailKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FAIL_KEYWORD) { return consume(); } else { Solution sol = recover(token, ParserRuleContext.FAIL_KEYWORD); return sol.recoveredNode; } } /** * Parse return statement. * <code>return-stmt := return [ action-or-expr ] ;</code> * * @return Return statement */ private STNode parseReturnStatement() { startContext(ParserRuleContext.RETURN_STMT); STNode returnKeyword = parseReturnKeyword(); STNode returnRhs = parseReturnStatementRhs(returnKeyword); endContext(); return returnRhs; } /** * Parse return-keyword. * * @return Return-keyword node */ private STNode parseReturnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.RETURN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.RETURN_KEYWORD); return parseReturnKeyword(); } } /** * Parse break statement. * <code>break-stmt := break ; </code> * * @return break statement */ private STNode parseBreakStatement() { startContext(ParserRuleContext.BREAK_STATEMENT); STNode breakKeyword = parseBreakKeyword(); STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon); } /** * Parse break-keyword. 
* * @return break-keyword node */ private STNode parseBreakKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BREAK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BREAK_KEYWORD); return parseBreakKeyword(); } } /** * <p> * Parse the right hand side of a return statement. * </p> * <code> * return-stmt-rhs := ; | action-or-expr ; * </code> * * @return Parsed node */ private STNode parseReturnStatementRhs(STNode returnKeyword) { STNode expr; STToken token = peek(); switch (token.kind) { case SEMICOLON_TOKEN: expr = STNodeFactory.createEmptyNode(); break; default: expr = parseActionOrExpression(); break; } STNode semicolon = parseSemicolon(); return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon); } /** * Parse mapping constructor expression. * <p> * <code>mapping-constructor-expr := { [field (, field)*] }</code> * * @return Parsed node */ private STNode parseMappingConstructorExpr() { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode openBrace = parseOpenBrace(); STNode fields = parseMappingConstructorFields(); STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); } /** * Parse mapping constructor fields. 
 *
 * @return Parsed node
 */
private STNode parseMappingConstructorFields() {
    STToken nextToken = peek();
    if (isEndOfMappingConstructor(nextToken.kind)) {
        // Empty mapping constructor: {}
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> fields = new ArrayList<>();
    STNode field = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD);
    if (field != null) {
        fields.add(field);
    }
    return parseMappingConstructorFields(fields);
}

// Parses the remaining comma-separated fields into the given list.
private STNode parseMappingConstructorFields(List<STNode> fields) {
    STToken nextToken;
    STNode mappingFieldEnd;
    nextToken = peek();
    while (!isEndOfMappingConstructor(nextToken.kind)) {
        mappingFieldEnd = parseMappingFieldEnd();
        if (mappingFieldEnd == null) {
            break;
        }
        fields.add(mappingFieldEnd);
        STNode field = parseMappingField(ParserRuleContext.MAPPING_FIELD);
        fields.add(field);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(fields);
}

// Parses the token that ends a mapping field: a comma, or null at close-brace.
private STNode parseMappingFieldEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.MAPPING_FIELD_END);
            return parseMappingFieldEnd();
    }
}

// Tokens that terminate a mapping constructor. Declaration keywords and simple
// types are treated as terminators to recover from a missing close-brace.
private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case IDENTIFIER_TOKEN:
        case READONLY_KEYWORD:
            return false;
        case EOF_TOKEN:
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case FUNCTION_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case RESOURCE_KEYWORD:
            return true;
        default:
            return isSimpleType(tokenKind);
    }
}

/**
 * Parse mapping constructor field.
 * <p>
 * <code>field := specific-field | computed-name-field | spread-field</code>
 *
 * @param fieldContext Context of the mapping field
 * @return Parsed node
 */
private STNode parseMappingField(ParserRuleContext fieldContext) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            STNode readonlyKeyword = STNodeFactory.createEmptyNode();
            return parseSpecificFieldWithOptionalValue(readonlyKeyword);
        case STRING_LITERAL_TOKEN:
            readonlyKeyword = STNodeFactory.createEmptyNode();
            return parseQualifiedSpecificField(readonlyKeyword);
        case READONLY_KEYWORD:
            readonlyKeyword = parseReadonlyKeyword();
            return parseSpecificField(readonlyKeyword);
        case OPEN_BRACKET_TOKEN:
            return parseComputedField();
        case ELLIPSIS_TOKEN:
            // Spread field: ...expr
            STNode ellipsis = parseEllipsis();
            STNode expr = parseExpression();
            return STNodeFactory.createSpreadFieldNode(ellipsis, expr);
        case CLOSE_BRACE_TOKEN:
            if (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {
                // Empty mapping constructor; null marks "no field"
                return null;
            }
            // Deliberate fall-through: a close-brace right after a comma is
            // invalid, so recover like any other unexpected token.
        default:
            recover(nextToken, fieldContext, fieldContext);
            return parseMappingField(fieldContext);
    }
}

// Parses the specific-field that follows a readonly qualifier.
private STNode parseSpecificField(STNode readonlyKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case STRING_LITERAL_TOKEN:
            return parseQualifiedSpecificField(readonlyKeyword);
        case IDENTIFIER_TOKEN:
            return parseSpecificFieldWithOptionalValue(readonlyKeyword);
        default:
            recover(peek(), ParserRuleContext.SPECIFIC_FIELD, readonlyKeyword);
            return parseSpecificField(readonlyKeyword);
    }
}

// Parses a specific-field whose key is a string literal: "key": value-expr
private STNode parseQualifiedSpecificField(STNode readonlyKeyword) {
    STNode key = parseStringLiteral();
    STNode colon = parseColon();
    STNode valueExpr = parseExpression();
    return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
}

/**
 * Parse mapping constructor specific-field with an optional value.
 *
 * @return Parsed node
 */
private STNode parseSpecificFieldWithOptionalValue(STNode readonlyKeyword) {
    STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);
    return parseSpecificFieldRhs(readonlyKeyword, key);
}

// Parses the optional ": value-expr" part of a specific field. A key-only
// field gets empty colon/value nodes.
private STNode parseSpecificFieldRhs(STNode readonlyKeyword, STNode key) {
    STNode colon;
    STNode valueExpr;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COLON_TOKEN:
            colon = parseColon();
            valueExpr = parseExpression();
            break;
        case COMMA_TOKEN:
            // Key-only field: colon and value are absent
            colon = STNodeFactory.createEmptyNode();
            valueExpr = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isEndOfMappingConstructor(nextToken.kind)) {
                colon = STNodeFactory.createEmptyNode();
                valueExpr = STNodeFactory.createEmptyNode();
                break;
            }
            recover(nextToken, ParserRuleContext.SPECIFIC_FIELD_RHS, readonlyKeyword, key);
            return parseSpecificFieldRhs(readonlyKeyword, key);
    }
    return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
}

/**
 * Parse string literal.
 *
 * @return Parsed node
 */
private STNode parseStringLiteral() {
    STToken token = peek();
    STNode stringLiteral;
    if (token.kind == SyntaxKind.STRING_LITERAL_TOKEN) {
        stringLiteral = consume();
    } else {
        recover(token, ParserRuleContext.STRING_LITERAL_TOKEN);
        return parseStringLiteral();
    }
    return parseBasicLiteral(stringLiteral);
}

/**
 * Parse colon token.
 *
 * @return Parsed node
 */
private STNode parseColon() {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLON_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.COLON);
        return parseColon();
    }
}

/**
 * Parse readonly keyword.
 *
 * @return Parsed node
 */
private STNode parseReadonlyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.READONLY_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.READONLY_KEYWORD);
        return parseReadonlyKeyword();
    }
}

/**
 * Parse computed-name-field of a mapping constructor expression.
 * <p>
 * <code>computed-name-field := [ field-name-expr ] : value-expr</code>
 *
 * @return Parsed node
 */
private STNode parseComputedField() {
    // [ field-name-expr ]
    startContext(ParserRuleContext.COMPUTED_FIELD_NAME);
    STNode openBracket = parseOpenBracket();
    STNode fieldNameExpr = parseExpression();
    STNode closeBracket = parseCloseBracket();
    endContext();
    // : value-expr
    STNode colon = parseColon();
    STNode valueExpr = parseExpression();
    return STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr);
}

/**
 * Parse open bracket.
 *
 * @return Parsed node
 */
private STNode parseOpenBracket() {
    STToken token = peek();
    if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return consume();
    } else {
        recover(token, ParserRuleContext.OPEN_BRACKET);
        return parseOpenBracket();
    }
}

/**
 * <p>
 * Parse the RHS portion of the compound assignment.
 * </p>
 * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {
    STNode binaryOperator = parseCompoundBinaryOperator();
    STNode equalsToken = parseAssignOp();
    STNode expr = parseActionOrExpression();
    STNode semicolon = parseSemicolon();
    // NOTE(review): this closes a context opened by the caller — confirm all
    // callers start a statement context before invoking this method.
    endContext();
    boolean lvExprValid = isValidLVExpr(lvExpr);
    if (!lvExprValid) {
        // Invalid LHS: replace it with a missing identifier and attach the
        // original expression as invalid-node minutiae.
        STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);
        lvExpr = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(simpleNameRef, lvExpr,
                DiagnosticErrorCode.ERROR_INVALID_EXPR_IN_COMPOUND_ASSIGNMENT_LHS);
    }
    return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,
            semicolon);
}

/**
 * Parse compound binary operator.
 * <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code>
 *
 * @return Parsed node
 */
private STNode parseCompoundBinaryOperator() {
    STToken token = peek();
    if (isCompoundBinaryOperator(token.kind)) {
        return consume();
    } else {
        recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);
        return parseCompoundBinaryOperator();
    }
}

/**
 * Parse service declaration.
 * <p>
 * <code>
 * service-decl := metadata service [variable-name] on expression-list service-body-block
 * <br/>
 * expression-list := expression (, expression)*
 * </code>
 *
 * @param metadata Metadata
 * @return Parsed node
 */
private STNode parseServiceDecl(STNode metadata) {
    startContext(ParserRuleContext.SERVICE_DECL);
    STNode serviceKeyword = parseServiceKeyword();
    STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);
    endContext();
    return serviceDecl;
}

/**
 * Parse rhs of the service declaration.
 * <p>
 * <code>
 * service-rhs := [variable-name] on expression-list service-body-block
 * </code>
 *
 * @param metadata Metadata
 * @param serviceKeyword Service keyword
 * @return Parsed node
 */
private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {
    STNode serviceName = parseServiceName();
    STNode onKeyword = parseOnKeyword();
    STNode expressionList = parseListeners();
    STNode serviceBody = parseServiceBody();
    // An empty listener list is an error; attach the diagnostic to the on-keyword
    onKeyword =
            cloneWithDiagnosticIfListEmpty(expressionList, onKeyword, DiagnosticErrorCode.ERROR_MISSING_EXPRESSION);
    return STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,
            expressionList, serviceBody);
}

// Parses the optional service name (absent when "on" follows directly).
private STNode parseServiceName() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            return parseIdentifier(ParserRuleContext.SERVICE_NAME);
        case ON_KEYWORD:
            return STNodeFactory.createEmptyNode();
        default:
            recover(nextToken, ParserRuleContext.OPTIONAL_SERVICE_NAME);
            return parseServiceName();
    }
}

/**
 * Parse service keyword.
* * @return Parsed node */ private STNode parseServiceKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SERVICE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SERVICE_KEYWORD); return parseServiceKeyword(); } } /** * Check whether the given token kind is a compound binary operator. * <p> * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code> * * @param tokenKind STToken kind * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise */ private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) { switch (tokenKind) { case PLUS_TOKEN: case MINUS_TOKEN: case SLASH_TOKEN: case ASTERISK_TOKEN: case BITWISE_AND_TOKEN: case BITWISE_XOR_TOKEN: case PIPE_TOKEN: case DOUBLE_LT_TOKEN: case DOUBLE_GT_TOKEN: case TRIPPLE_GT_TOKEN: return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN; default: return false; } } /** * Parse on keyword. * * @return Parsed node */ private STNode parseOnKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ON_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ON_KEYWORD); return parseOnKeyword(); } } /** * Parse listener references. 
 * <p>
 * <code>expression-list := expression (, expression)*</code>
 *
 * @return Parsed node
 */
private STNode parseListeners() {
    startContext(ParserRuleContext.LISTENERS_LIST);
    List<STNode> listeners = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfListeners(nextToken.kind)) {
        // Empty listener list (reported as an error by the caller)
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    STNode expr = parseExpression();
    listeners.add(expr);
    STNode listenersMemberEnd;
    while (!isEndOfListeners(peek().kind)) {
        listenersMemberEnd = parseListenersMemberEnd();
        if (listenersMemberEnd == null) {
            break;
        }
        listeners.add(listenersMemberEnd);
        expr = parseExpression();
        listeners.add(expr);
    }
    endContext();
    return STNodeFactory.createNodeList(listeners);
}

// The listener list ends at the service-body open-brace (or EOF).
private boolean isEndOfListeners(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case OPEN_BRACE_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

// Parses the token that ends a listener: a comma, or null at open-brace.
private STNode parseListenersMemberEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return parseComma();
        case OPEN_BRACE_TOKEN:
            return null;
        default:
            recover(nextToken, ParserRuleContext.LISTENERS_LIST_END);
            return parseListenersMemberEnd();
    }
}

/**
 * Parse service body.
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * </code>
 *
 * @return Parsed node
 */
private STNode parseServiceBody() {
    STNode openBrace = parseOpenBrace();
    STNode resources = parseResources();
    STNode closeBrace = parseCloseBrace();
    return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);
}

/**
 * Parse service resource definitions.
*
 * @return Parsed node
 */
private STNode parseResources() {
    List<STNode> resources = new ArrayList<>();
    STToken nextToken = peek();
    while (!isEndOfServiceDecl(nextToken.kind)) {
        STNode serviceMethod = parseResource();
        if (serviceMethod == null) {
            break;
        }
        resources.add(serviceMethod);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(resources);
}

/**
 * Check whether the given token kind terminates a service declaration body.
 *
 * @param tokenKind STToken kind
 * @return <code>true</code> if the token ends the service declaration. <code>false</code> otherwise
 */
private boolean isEndOfServiceDecl(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
        case TYPE_KEYWORD:
        case SERVICE_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse resource definition (i.e. service-method-defn).
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * <br/>
 * service-method-defn := metadata [resource] function identifier function-signature method-defn-body
 * </code>
 *
 * @return Parsed node
 */
private STNode parseResource() {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case RESOURCE_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case FUNCTION_KEYWORD:
        case REMOTE_KEYWORD:
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            metadata = parseMetaData();
            break;
        default:
            if (isEndOfServiceDecl(nextToken.kind)) {
                // null signals the caller (parseResources) that the body has ended.
                return null;
            }
            recover(peek(), ParserRuleContext.RESOURCE_DEF);
            return parseResource();
    }
    return parseResource(metadata);
}

/**
 * Parse a resource definition given its already-parsed metadata.
 *
 * @param metadata Metadata of the resource
 * @return Parsed node
 */
private STNode parseResource(STNode metadata) {
    STNode qualifierList = parseFunctionQualifiers(ParserRuleContext.RESOURCE_DEF_QUALIFIERS, new ArrayList<>());
    return parseFuncDefinition(metadata, false, qualifierList);
}

/**
 * Parse resource keyword.
 *
 * @return Parsed node
 */
private STNode parseResourceKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RESOURCE_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.RESOURCE_KEYWORD);
        return parseResourceKeyword();
    }
}

/**
 * Check whether next construct is a service declaration or not. This method is
 * used to determine whether an end-of-block is reached, if the next token is
 * a service-keyword. Because service-keyword can be used in statements as well
 * as in top-level node (service-decl). If we have reached a service-decl, then
 * it could be due to a missing close-brace at the end of the current block.
 *
 * @param currentContext Current parser context (not inspected by this method)
 * @param lookahead Number of tokens to look past before the check
 * @return <code>true</code> if the next construct is a service declaration.
 *         <code>false</code> otherwise
 */
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
    switch (peek(lookahead + 1).kind) {
        case IDENTIFIER_TOKEN:
            SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
            switch (tokenAfterIdentifier) {
                case ON_KEYWORD:
                case OPEN_BRACE_TOKEN:
                    // e.g. `service foo on ...` / `service foo {` — a service decl.
                    return true;
                case EQUAL_TOKEN:
                case SEMICOLON_TOKEN:
                case QUESTION_MARK_TOKEN:
                    // `service` is being used as a type in a statement here.
                    return false;
                default:
                    return false;
            }
        case ON_KEYWORD:
            return true;
        default:
            return false;
    }
}

/**
 * Parse listener declaration, given the qualifier.
 * <p>
 * <code>
 * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
 * </code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.LISTENER_DECL);
    STNode listenerKeyword = parseListenerKeyword();
    // An identifier right after `listener` means the type-descriptor may be absent.
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode listenerDecl =
                parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
        endContext();
        return listenerDecl;
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc,
            variableName, equalsToken, initializer, semicolonToken);
}

/**
 * Parse listener
keyword.
 *
 * @return Parsed node
 */
private STNode parseListenerKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.LISTENER_KEYWORD);
        return parseListenerKeyword();
    }
}

/**
 * Parse constant declaration, given the qualifier.
 * <p>
 * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.CONSTANT_DECL);
    STNode constKeyword = parseConstantKeyword();
    STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);
    endContext();
    return constDecl;
}

/**
 * Parse the components that follows after the const keyword of a constant declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ANNOTATION_KEYWORD:
            // `const annotation ...` is an annotation declaration, not a constant.
            return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
        case IDENTIFIER_TOKEN:
            return parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                break;
            }
            recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);
            return parseConstDecl(metadata, qualifier, constKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc,
            variableName, equalsToken,
            initializer, semicolonToken);
}

/**
 * Parse a const or listener declaration in which the type-descriptor may be absent.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword The <code>const</code> or <code>listener</code> keyword
 * @param isListener Whether this is a listener declaration
 * @return Parsed node
 */
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                           boolean isListener) {
    STNode varNameOrTypeName = parseStatementStartIdentifier();
    STNode constDecl =
            parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
    return constDecl;
}

/**
 * Parse the component that follows the first identifier in a const decl. The identifier
 * can be either the type-name (a user defined type) or the var-name, where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param keyword Keyword
 * @param typeOrVarName Identifier that follows the const-keywoord
 * @param isListener Whether this is a listener declaration
 * @return Parsed node
 */
private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                              STNode typeOrVarName, boolean isListener) {
    if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name can only be the type; the var-name must follow.
        STNode type = typeOrVarName;
        STNode variableName = parseVariableName();
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
    STNode type;
    STNode variableName;
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            type = typeOrVarName;
            variableName = parseVariableName();
            break;
        case EQUAL_TOKEN:
            // No type present: the first identifier was the variable name.
            variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
            type = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName,
                    isListener);
            return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
    }
    return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
}

/**
 * Parse the <code>= expression ;</code> tail shared by listener and const declarations.
 *
 * @param isListener Whether to build a listener declaration (else a constant declaration)
 * @return Parsed node
 */
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
                                       STNode type, STNode variableName) {
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    if (isListener) {
        return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
                equalsToken, initializer, semicolonToken);
    }
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
            equalsToken, initializer, semicolonToken);
}

/**
 * Parse const keyword.
 *
 * @return Parsed node
 */
private STNode parseConstantKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONST_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.CONST_KEYWORD);
        return parseConstantKeyword();
    }
}

/**
 * Parse typeof expression.
 * <p>
 * <code>
 * typeof-expr := typeof expression
 * </code>
 *
 * @param isRhsExpr Whether this is a rhs expression
 * @param isInConditionalExpr Whether this expression occurs in a conditional expression
 * @return Typeof expression node
 */
private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode typeofKeyword = parseTypeofKeyword();
    STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
}

/**
 * Parse typeof-keyword.
 *
 * @return Typeof-keyword node
 */
private STNode parseTypeofKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.TYPEOF_KEYWORD);
        return parseTypeofKeyword();
    }
}

/**
 * Parse optional type descriptor.
 * <p>
 * <code>optional-type-descriptor := type-descriptor ? </code>
 * </p>
 *
 * @param typeDescriptorNode Type descriptor that precedes the question mark
 * @return Parsed node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMarkToken = parseQuestionMark();
    endContext();
    typeDescriptorNode = validateForUsageOfVar(typeDescriptorNode);
    return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);
}

/**
 * Parse unary expression.
 * <p>
 * <code>
 * unary-expr := + expression | - expression | ~ expression | !
expression
 * </code>
 *
 * @param isRhsExpr Whether this is a rhs expression
 * @param isInConditionalExpr Whether this expression occurs in a conditional expression
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode unaryOperator = parseUnaryOperator();
    STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
}

/**
 * Parse unary operator.
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Parsed node
 */
private STNode parseUnaryOperator() {
    STToken token = peek();
    if (isUnaryOperator(token.kind)) {
        return consume();
    } else {
        recover(token, ParserRuleContext.UNARY_OPERATOR);
        return parseUnaryOperator();
    }
}

/**
 * Check whether the given token kind is a unary operator.
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    switch (kind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse array type descriptor.
 * <p>
 * <code>
 * array-type-descriptor := member-type-descriptor [ [ array-length ] ]
 * member-type-descriptor := type-descriptor
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * inferred-array-length := *
 * </code>
 * </p>
 *
 * @param memberTypeDesc Member type of the array
 * @return Parsed Node
 */
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracketToken = parseOpenBracket();
    STNode arrayLengthNode = parseArrayLength();
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);
}

/**
 * Create an array type descriptor node, validating the member type on the way.
 *
 * @return Array type descriptor node
 */
private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
                                   STNode closeBracketToken) {
    memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
    return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,
            closeBracketToken);
}

/**
 * Parse array length.
 * <p>
 * <code>
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * constant-reference-expr := variable-reference-expr
 * </code>
 * </p>
 *
 * @return Parsed array length
 */
private STNode parseArrayLength() {
    STToken token = peek();
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            // Empty brackets: no explicit length.
            return STNodeFactory.createEmptyNode();
        case IDENTIFIER_TOKEN:
            // constant-reference-expr is parsed as a (possibly qualified) identifier.
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            recover(token, ParserRuleContext.ARRAY_LENGTH);
            return parseArrayLength();
    }
}

/**
 * Parse annotations.
 * <p>
 * <i>Note: In the ballerina spec,
 * annotations-list is specified as one-or-more annotations. And the usage is marked as
 * optional annotations-list. However, for the consistency of the tree, here we make the
 * annotation-list as zero-or-more annotations, and the usage is not-optional.</i>
 * <p>
 * <code>annots := annotation*</code>
 *
 * @return Parsed node
 */
private STNode parseOptionalAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotList = new ArrayList<>();
    STToken nextToken = peek();
    while (nextToken.kind == SyntaxKind.AT_TOKEN) {
        annotList.add(parseAnnotation());
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(annotList);
}

/**
 * Parse annotation list with at least one annotation.
 *
 * @return Annotation list
 */
private STNode parseAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotList = new ArrayList<>();
    annotList.add(parseAnnotation());
    while (peek().kind == SyntaxKind.AT_TOKEN) {
        annotList.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotList);
}

/**
 * Parse annotation attachment.
 * <p>
 * <code>annotation := @ annot-tag-reference annot-value</code>
 *
 * @return Parsed node
 */
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    STNode annotReference;
    if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
        annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    } else {
        annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
    }
    STNode annotValue;
    // annot-value (a mapping constructor) is optional.
    if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        annotValue = parseMappingConstructorExpr();
    } else {
        annotValue = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}

/**
 * Parse '@' token.
 *
 * @return Parsed node
 */
private STNode parseAtToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.AT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.AT);
        return parseAtToken();
    }
}

/**
 * Parse metadata. Meta data consist of optional doc string and
 * an annotations list.
* <p>
 * <code>metadata := [DocumentationString] annots</code>
 *
 * @return Parse node
 */
private STNode parseMetaData() {
    STNode docString;
    STNode annotations;
    switch (peek().kind) {
        case DOCUMENTATION_STRING:
            docString = parseMarkdownDocumentation();
            annotations = parseOptionalAnnotations();
            break;
        case AT_TOKEN:
            docString = STNodeFactory.createEmptyNode();
            annotations = parseOptionalAnnotations();
            break;
        default:
            // Neither doc string nor annotations: no metadata node at all.
            return STNodeFactory.createEmptyNode();
    }
    return createMetadata(docString, annotations);
}

/**
 * Create metadata node.
 *
 * @return A metadata node
 */
private STNode createMetadata(STNode docString, STNode annotations) {
    if (annotations == null && docString == null) {
        return STNodeFactory.createEmptyNode();
    } else {
        return STNodeFactory.createMetadataNode(docString, annotations);
    }
}

/**
 * Parse is expression.
 * <code>
 * is-expr := expression is type-descriptor
 * </code>
 *
 * @param lhsExpr Preceding expression of the is expression
 * @param isInConditionalExpr Whether this expression occurs in a conditional expression
 * @return Is expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode isKeyword = parseIsKeyword();
    STNode typeDescriptor =
            parseTypeDescriptorInExpression(ParserRuleContext.TYPE_DESC_IN_EXPRESSION, isInConditionalExpr);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);
}

/**
 * Parse is-keyword.
 *
 * @return Is-keyword node
 */
private STNode parseIsKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IS_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.IS_KEYWORD);
        return parseIsKeyword();
    }
}

/**
 * Parse local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations preceding the statement
 * @return Local type definition statement
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
            semicolon);
}

/**
 * Parse statement which is only consists of an action or expression.
 *
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseExpressionStatement(STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    STNode expression = parseActionOrExpressionInLhs(annots);
    return getExpressionAsStatement(expression);
}

/**
 * Parse statements that starts with an expression.
 *
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseStatementStartWithExpr(STNode annots) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode expr = parseActionOrExpressionInLhs(annots);
    return parseStatementStartWithExprRhs(expr);
}

/**
 * Parse the component followed by the expression, at the beginning of a statement.
*
 * @param expression Expression parsed at the beginning of the statement
 * @return Statement node
 */
private STNode parseStatementStartWithExprRhs(STNode expression) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EQUAL_TOKEN:
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case SEMICOLON_TOKEN:
            return getExpressionAsStatement(expression);
        case IDENTIFIER_TOKEN:
        default:
            // A compound binary operator here means a compound assignment statement.
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            ParserRuleContext context;
            if (isPossibleExpressionStatement(expression)) {
                context = ParserRuleContext.EXPR_STMT_RHS;
            } else {
                context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
            }
            recover(peek(), context, expression);
            return parseStatementStartWithExprRhs(expression);
    }
}

/**
 * Check whether the given expression kind can stand alone as an expression statement.
 *
 * @param expression Expression to check
 * @return <code>true</code> if it can form an expression statement. <code>false</code> otherwise
 */
private boolean isPossibleExpressionStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return true;
        default:
            return false;
    }
}

/**
 * Wrap an already-parsed expression/action into a statement, consuming the semicolon.
 * Expression kinds that cannot form a statement produce an invalid-expression-statement
 * node carrying a diagnostic.
 *
 * @param expression Parsed expression or action
 * @return Statement node
 */
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
            return parseCallStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return parseActionStatement(expression);
        default:
            // Anything else is not a valid expression statement; attach a diagnostic.
            STNode semicolon = parseSemicolon();
            endContext();
            STNode exprStmt =
                    STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                            expression, semicolon);
            exprStmt = SyntaxErrors.addDiagnostic(exprStmt,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
            return exprStmt;
    }
}

/**
 * Convert an indexed expression (<code>T[...]</code>) into an array type descriptor,
 * validating the index expression as an array length.
 *
 * @param indexedExpr Indexed expression to convert
 * @return Array type descriptor node
 */
private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
    STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
    STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
    if (lengthExprs.isEmpty()) {
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                indexedExpr.closeBracket);
    }
    STNode lengthExpr = lengthExprs.get(0);
    switch (lengthExpr.kind) {
        case ASTERISK_LITERAL:
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            break;
        case NUMERIC_LITERAL:
            SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
            if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                break;
            }
            // Intentional fall through: a non-integer numeric literal is an invalid length.
        default:
            // Invalid length: attach the expression as invalid minutiae and drop it.
            STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                    indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
            indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
            lengthExpr = STNodeFactory.createEmptyNode();
    }
    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
}

/**
 * <p>
 * Parse call statement, given the call expression.
* <p>
 * <code>
 * call-stmt := call-expr ;
 * <br/>
 * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
 * </code>
 *
 * @param expression Call expression associated with the call statement
 * @return Call statement node
 */
private STNode parseCallStatement(STNode expression) {
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
}

/**
 * Wrap an already-parsed action into an action statement, consuming the semicolon.
 *
 * @param action Parsed action
 * @return Action statement node
 */
private STNode parseActionStatement(STNode action) {
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
}

/**
 * Parse remote method call action, given the starting expression.
 * <p>
 * <code>
 * remote-method-call-action := expression -> method-name ( arg-list )
 * <br/>
 * async-send-action := expression -> peer-worker ;
 * </code>
 *
 * @param isRhsExpr Is this an RHS action
 * @param expression LHS expression
 * @return Remote method call action or async send action node
 */
private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
    STNode rightArrow = parseRightArrow();
    return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
}

/**
 * Parse the name that follows the right-arrow of a remote-call or async-send action.
 *
 * @return Remote method call action or async send action node
 */
private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
    STNode name;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DEFAULT_KEYWORD:
            // `-> default` can only be an async send to the default worker.
            STNode defaultKeyword = parseDefaultKeyword();
            name = STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);
            return parseAsyncSendAction(expression, rightArrow, name);
        case IDENTIFIER_TOKEN:
            name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
            break;
        case CONTINUE_KEYWORD:
        case COMMIT_KEYWORD:
            name = getKeywordAsSimpleNameRef();
            break;
        default:
            STToken token = peek();
            recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);
            return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
    }
    return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}

/**
 * Disambiguate between a remote method call and an async send, by the token after the name.
 *
 * @return Remote method call action or async send action node
 */
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseRemoteMethodCallAction(expression, rightArrow, name);
        case SEMICOLON_TOKEN:
            return parseAsyncSendAction(expression, rightArrow, name);
        default:
            recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);
            return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
}

/**
 * Parse default keyword.
 *
 * @return default keyword node
 */
private STNode parseDefaultKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.DEFAULT_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.DEFAULT_KEYWORD);
        return parseDefaultKeyword();
    }
}

/**
 * Create an async send action node from its already-parsed parts.
 *
 * @return Async send action node
 */
private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
    return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
}

/**
 * Parse the argument list of a remote method call action.
 *
 * @return Remote method call action node
 */
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
    STNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode arguments = parseArgsList();
    STNode closeParenToken = parseCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken,
            arguments, closeParenToken);
}

/**
 * Parse right arrow (<code>-></code>) token.
 *
 * @return Parsed node
 */
private STNode parseRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.RIGHT_ARROW);
        return parseRightArrow();
    }
}

/**
 * Parse parameterized type descriptor.
 * parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter
 *
 * @return Parsed node
 */
private STNode parseParameterizedTypeDescriptor() {
    STNode parameterizedTypeKeyword = parseParameterizedTypeKeyword();
    STNode typeParameter = parseTypeParameter();
    return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter);
}

/**
 * Parse <code>map</code> or <code>future</code> keyword token.
 *
 * @return Parsed node
 */
private STNode parseParameterizedTypeKeyword() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
            return consume();
        default:
            recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);
            return parseParameterizedTypeKeyword();
    }
}

/**
 * Parse <code> > </code> token.
 *
 * @return Parsed node
 */
private STNode parseGTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.GT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.GT);
        return parseGTToken();
    }
}

/**
 * Parse <code> < </code> token.
 *
 * @return Parsed node
 */
private STNode parseLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.LT);
        return parseLTToken();
    }
}

/**
 * Parse nil literal. Here nil literal is only referred to ( ).
 *
 * @return Parsed node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode closeParenthesisToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
}

/**
 * Parse annotation declaration, given the qualifier.
*
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotationKeyword = parseAnnotationKeyword();
    STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    endContext();
    return annotDecl;
}

/**
 * Parse annotation keyword.
 *
 * @return Parsed node
 */
private STNode parseAnnotationKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {
        return consume();
    } else {
        recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
        return parseAnnotationKeyword();
    }
}

/**
 * Parse the components that follows after the annotation keyword of a annotation declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                           STNode annotationKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // The identifier may be the optional type or the annot-tag itself.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                break;
            }
            recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword,
                    annotationKeyword);
            return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}

/**
 * Parse annotation tag.
 * <p>
 * <code>annot-tag := identifier</code>
 *
 * @return Annotation tag node
 */
private STNode parseAnnotationTag() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        recover(peek(), ParserRuleContext.ANNOTATION_TAG);
        return parseAnnotationTag();
    }
}

/**
 * Parse an annotation declaration whose leading identifier may be either the
 * (optional) type or the annot-tag.
 *
 * @return Parsed node
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name can only be the type; the annot-tag must follow.
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
        STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
}

/**
 * Parse the component that follows the first identifier in an annotation decl. The identifier
 * can be either the type-name (a user defined type) or the annot-tag, where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                      STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STToken nextToken = peek();
    STNode typeDesc;
    STNode annotTag;
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // The type is absent; the identifier already parsed is the annot-tag.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDescOrAnnotTag);
            return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}

/**
 * Parse the optional <code>on attach-points</code> tail of an annotation declaration.
 *
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
    STNode onKeyword;
    STNode attachPoints;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNodeList();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            // `on` without any attach point gets a diagnostic attached to the keyword.
            onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDesc, annotTag);
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                    typeDesc, annotTag);
    }
    STNode semicolonToken =
parseSemicolon(); return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword, typeDesc, annotTag, onKeyword, attachPoints, semicolonToken); } /** * Parse annotation attach points. * <p> * <code> * annot-attach-points := annot-attach-point (, annot-attach-point)* * <br/><br/> * annot-attach-point := dual-attach-point | source-only-attach-point * <br/><br/> * dual-attach-point := [source] dual-attach-point-ident * <br/><br/> * dual-attach-point-ident := * [object] type * | [object|resource] function * | parameter * | return * | service * | [object|record] field * <br/><br/> * source-only-attach-point := source source-only-attach-point-ident * <br/><br/> * source-only-attach-point-ident := * annotation * | external * | var * | const * | listener * | worker * </code> * * @return Parsed node */ private STNode parseAnnotationAttachPoints() { startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST); List<STNode> attachPoints = new ArrayList<>(); STToken nextToken = peek(); if (isEndAnnotAttachPointList(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode attachPoint = parseAnnotationAttachPoint(); attachPoints.add(attachPoint); nextToken = peek(); STNode leadingComma; while (!isEndAnnotAttachPointList(nextToken.kind)) { leadingComma = parseAttachPointEnd(); if (leadingComma == null) { break; } attachPoints.add(leadingComma); attachPoint = parseAnnotationAttachPoint(); if (attachPoint == null) { attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN, DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT); attachPoints.add(attachPoint); break; } attachPoints.add(attachPoint); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(attachPoints); } /** * Parse annotation attach point end. 
     *
     * @return Parsed node
     */
    private STNode parseAttachPointEnd() {
        switch (peek().kind) {
            case SEMICOLON_TOKEN:
                // null signals the caller that the attach-point list has ended.
                return null;
            case COMMA_TOKEN:
                return consume();
            default:
                // Insert/remove tokens via error recovery, then retry.
                recover(peek(), ParserRuleContext.ATTACH_POINT_END);
                return parseAttachPointEnd();
        }
    }

    private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case SEMICOLON_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse annotation attach point.
     *
     * @return Parsed node, or null on EOF
     */
    private STNode parseAnnotationAttachPoint() {
        switch (peek().kind) {
            case EOF_TOKEN:
                return null;

            // These are source-only attach-point idents; parseSourceKeyword()
            // recovers (inserts) the `source` keyword if it is missing.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
            case SOURCE_KEYWORD:
                STNode sourceKeyword = parseSourceKeyword();
                return parseAttachPointIdent(sourceKeyword);

            // Dual attach-point idents: no `source` keyword expected here.
            case OBJECT_KEYWORD:
            case TYPE_KEYWORD:
            case RESOURCE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case RECORD_KEYWORD:
                sourceKeyword = STNodeFactory.createEmptyNode();
                STNode firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT);
                return parseAnnotationAttachPoint();
        }
    }

    /**
     * Parse source keyword.
     *
     * @return Parsed node
     */
    private STNode parseSourceKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SOURCE_KEYWORD);
            return parseSourceKeyword();
        }
    }

    /**
     * Parse attach point ident given.
* <p> * <code> * source-only-attach-point-ident := annotation | external | var | const | listener | worker * <br/><br/> * dual-attach-point-ident := [object] type | [object|resource] function | parameter * | return | service | [object|record] field * </code> * * @param sourceKeyword Source keyword * @return Parsed node */ private STNode parseAttachPointIdent(STNode sourceKeyword) { switch (peek().kind) { case ANNOTATION_KEYWORD: case EXTERNAL_KEYWORD: case VAR_KEYWORD: case CONST_KEYWORD: case LISTENER_KEYWORD: case WORKER_KEYWORD: STNode firstIdent = consume(); STNode secondIdent = STNodeFactory.createEmptyNode(); return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); case OBJECT_KEYWORD: case RESOURCE_KEYWORD: case RECORD_KEYWORD: case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: firstIdent = consume(); return parseDualAttachPointIdent(sourceKeyword, firstIdent); default: recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword); return parseAttachPointIdent(sourceKeyword); } } /** * Parse dual-attach-point ident. * * @param sourceKeyword Source keyword * @param firstIdent first part of the dual attach-point * @return Parsed node */ private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) { STNode secondIdent; switch (firstIdent.kind) { case OBJECT_KEYWORD: secondIdent = parseIdentAfterObjectIdent(); break; case RESOURCE_KEYWORD: secondIdent = parseFunctionIdent(); break; case RECORD_KEYWORD: secondIdent = parseFieldIdent(); break; case TYPE_KEYWORD: case FUNCTION_KEYWORD: case PARAMETER_KEYWORD: case RETURN_KEYWORD: case SERVICE_KEYWORD: case FIELD_KEYWORD: default: secondIdent = STNodeFactory.createEmptyNode(); break; } return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent); } /** * Parse the idents that are supported after object-ident. 
     *
     * @return Parsed node
     */
    private STNode parseIdentAfterObjectIdent() {
        STToken token = peek();
        switch (token.kind) {
            // Only `type`, `function` and `field` may follow the `object` ident.
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case FIELD_KEYWORD:
                return consume();
            default:
                recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
                return parseIdentAfterObjectIdent();
        }
    }

    /**
     * Parse function ident.
     *
     * @return Parsed node
     */
    private STNode parseFunctionIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FUNCTION_IDENT);
            return parseFunctionIdent();
        }
    }

    /**
     * Parse field ident.
     *
     * @return Parsed node
     */
    private STNode parseFieldIdent() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FIELD_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FIELD_IDENT);
            return parseFieldIdent();
        }
    }

    /**
     * Parse XML namespace declaration.
     * <p>
     * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
     * <br/>
     * xml-namespace-uri := simple-const-expr
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @return Parsed XML namespace declaration node
     */
    private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
        startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
        STNode xmlnsKeyword = parseXMLNSKeyword();

        STNode namespaceUri = parseSimpleConstExpr();
        // Keep consuming expressions until a valid namespace URI is found. Each
        // invalid expression is attached to the `xmlns` keyword as trailing
        // invalid-node minutiae, with a diagnostic.
        while (!isValidXMLNameSpaceURI(namespaceUri)) {
            xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                    DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
            namespaceUri = parseSimpleConstExpr();
        }

        STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        endContext();
        return xmlnsDecl;
    }

    /**
     * Parse xmlns keyword.
     *
     * @return Parsed node
     */
    private STNode parseXMLNSKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.XMLNS_KEYWORD);
            return parseXMLNSKeyword();
        }
    }

    private boolean isValidXMLNameSpaceURI(STNode expr) {
        switch (expr.kind) {
            // Only string literals and (qualified) name references are valid URIs.
            case STRING_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
            case SIMPLE_NAME_REFERENCE:
                return true;
            case IDENTIFIER_TOKEN:
            default:
                return false;
        }
    }

    private STNode parseSimpleConstExpr() {
        startContext(ParserRuleContext.CONSTANT_EXPRESSION);
        STNode expr = parseSimpleConstExprInternal();
        endContext();
        return expr;
    }

    /**
     * Parse simple constants expr.
     *
     * @return Parsed node
     */
    private STNode parseSimpleConstExprInternal() {
        switch (peek().kind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return parseBasicLiteral();
            case IDENTIFIER_TOKEN:
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return parseSignedIntOrFloat();
            case OPEN_PAREN_TOKEN:
                return parseNilLiteral();
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);
                return parseSimpleConstExprInternal();
        }
    }

    /**
     * Parse the portion after the namespace-uri of an XML declaration.
* * @param xmlnsKeyword XMLNS keyword * @param namespaceUri Namespace URI * @return Parsed node */ private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) { STNode asKeyword = STNodeFactory.createEmptyNode(); STNode namespacePrefix = STNodeFactory.createEmptyNode(); switch (peek().kind) { case AS_KEYWORD: asKeyword = parseAsKeyword(); namespacePrefix = parseNamespacePrefix(); break; case SEMICOLON_TOKEN: break; default: recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar); return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar); } STNode semicolon = parseSemicolon(); if (isModuleVar) { return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix, semicolon); } /** * Parse import prefix. * * @return Parsed node */ private STNode parseNamespacePrefix() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) { return consume(); } else { recover(peek(), ParserRuleContext.NAMESPACE_PREFIX); return parseNamespacePrefix(); } } /** * Parse named worker declaration. 
     * <p>
     * <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code>
     *
     * @param annots Annotations attached to the worker decl
     * @return Parsed node
     */
    private STNode parseNamedWorkerDeclaration(STNode annots) {
        startContext(ParserRuleContext.NAMED_WORKER_DECL);
        STNode workerKeyword = parseWorkerKeyword();
        STNode workerName = parseWorkerName();
        STNode returnTypeDesc = parseReturnTypeDescriptor();
        STNode workerBody = parseBlockNode();
        endContext();
        return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,
                workerBody);
    }

    private STNode parseReturnTypeDescriptor() {
        // The return-type-descriptor is optional: empty unless `returns` follows.
        STToken token = peek();
        if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
            return STNodeFactory.createEmptyNode();
        }
        STNode returnsKeyword = consume();
        STNode annot = parseOptionalAnnotations();
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
        return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
    }

    /**
     * Parse worker keyword.
     *
     * @return Parsed node
     */
    private STNode parseWorkerKeyword() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_KEYWORD);
            return parseWorkerKeyword();
        }
    }

    /**
     * Parse worker name.
     * <p>
     * <code>worker-name := identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseWorkerName() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        } else {
            recover(peek(), ParserRuleContext.WORKER_NAME);
            return parseWorkerName();
        }
    }

    /**
     * Parse lock statement.
     * <code>lock-stmt := lock block-stmt ;</code>
     *
     * @return Lock statement
     */
    private STNode parseLockStatement() {
        startContext(ParserRuleContext.LOCK_STMT);
        STNode lockKeyword = parseLockKeyword();
        STNode blockStatement = parseBlockNode();
        endContext();
        // An optional on-fail clause may follow the block statement.
        STNode onFailClause;
        if (peek().kind == SyntaxKind.ON_KEYWORD) {
            onFailClause = parseOnFailClause();
        } else {
            onFailClause = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
    }

    /**
     * Parse lock-keyword.
     *
     * @return lock-keyword node
     */
    private STNode parseLockKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LOCK_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LOCK_KEYWORD);
            return parseLockKeyword();
        }
    }

    /**
     * Parse union type descriptor.
     * union-type-descriptor := type-descriptor | type-descriptor
     *
     * @param leftTypeDesc Type desc in the LHS of the union type desc.
     * @param context Current context.
     * @param isTypedBindingPattern Whether this is parsed within a typed-binding-pattern
     * @return parsed union type desc node
     */
    private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                            boolean isTypedBindingPattern) {
        STNode pipeToken = parsePipeToken();
        STNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);
        return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);
    }

    private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
        // Both members are validated: `var` is not a valid union member.
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
    }

    /**
     * Parse pipe token.
* * @return parsed pipe token node */ private STNode parsePipeToken() { STToken token = peek(); if (token.kind == SyntaxKind.PIPE_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.PIPE); return parsePipeToken(); } } private boolean isTypeStartingToken(SyntaxKind nodeKind) { switch (nodeKind) { case IDENTIFIER_TOKEN: case SERVICE_KEYWORD: case RECORD_KEYWORD: case OBJECT_KEYWORD: case ABSTRACT_KEYWORD: case CLIENT_KEYWORD: case OPEN_PAREN_TOKEN: case MAP_KEYWORD: case FUTURE_KEYWORD: case TYPEDESC_KEYWORD: case ERROR_KEYWORD: case STREAM_KEYWORD: case TABLE_KEYWORD: case FUNCTION_KEYWORD: case OPEN_BRACKET_TOKEN: case DISTINCT_KEYWORD: return true; default: if (isSingletonTypeDescStart(nodeKind, true)) { return true; } return isSimpleType(nodeKind); } } static boolean isSimpleType(SyntaxKind nodeKind) { switch (nodeKind) { case INT_KEYWORD: case FLOAT_KEYWORD: case DECIMAL_KEYWORD: case BOOLEAN_KEYWORD: case STRING_KEYWORD: case BYTE_KEYWORD: case XML_KEYWORD: case JSON_KEYWORD: case HANDLE_KEYWORD: case ANY_KEYWORD: case ANYDATA_KEYWORD: case NEVER_KEYWORD: case SERVICE_KEYWORD: case VAR_KEYWORD: case ERROR_KEYWORD: case STREAM_KEYWORD: case TYPEDESC_KEYWORD: case READONLY_KEYWORD: case DISTINCT_KEYWORD: return true; default: return false; } } private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) { switch (typeKeyword) { case INT_KEYWORD: return SyntaxKind.INT_TYPE_DESC; case FLOAT_KEYWORD: return SyntaxKind.FLOAT_TYPE_DESC; case DECIMAL_KEYWORD: return SyntaxKind.DECIMAL_TYPE_DESC; case BOOLEAN_KEYWORD: return SyntaxKind.BOOLEAN_TYPE_DESC; case STRING_KEYWORD: return SyntaxKind.STRING_TYPE_DESC; case BYTE_KEYWORD: return SyntaxKind.BYTE_TYPE_DESC; case XML_KEYWORD: return SyntaxKind.XML_TYPE_DESC; case JSON_KEYWORD: return SyntaxKind.JSON_TYPE_DESC; case HANDLE_KEYWORD: return SyntaxKind.HANDLE_TYPE_DESC; case ANY_KEYWORD: return SyntaxKind.ANY_TYPE_DESC; case ANYDATA_KEYWORD: return SyntaxKind.ANYDATA_TYPE_DESC; case READONLY_KEYWORD: 
return SyntaxKind.READONLY_TYPE_DESC; case NEVER_KEYWORD: return SyntaxKind.NEVER_TYPE_DESC; case SERVICE_KEYWORD: return SyntaxKind.SERVICE_TYPE_DESC; case VAR_KEYWORD: return SyntaxKind.VAR_TYPE_DESC; case ERROR_KEYWORD: return SyntaxKind.ERROR_TYPE_DESC; default: return SyntaxKind.TYPE_REFERENCE; } } /** * Parse fork-keyword. * * @return Fork-keyword node */ private STNode parseForkKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FORK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FORK_KEYWORD); return parseForkKeyword(); } } /** * Parse fork statement. * <code>fork-stmt := fork { named-worker-decl+ }</code> * * @return Fork statement */ private STNode parseForkStatement() { startContext(ParserRuleContext.FORK_STMT); STNode forkKeyword = parseForkKeyword(); STNode openBrace = parseOpenBrace(); ArrayList<STNode> workers = new ArrayList<>(); while (!isEndOfStatements()) { STNode stmt = parseStatement(); if (stmt == null) { break; } switch (stmt.kind) { case NAMED_WORKER_DECLARATION: workers.add(stmt); break; default: if (workers.isEmpty()) { openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt, DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE); } else { updateLastNodeInListWithInvalidNode(workers, stmt, DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE); } } } STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers); STNode closeBrace = parseCloseBrace(); endContext(); STNode forkStmt = STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace); if (isNodeListEmpty(namedWorkerDeclarations)) { return SyntaxErrors.addDiagnostic(forkStmt, DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT); } return forkStmt; } /** * Parse trap expression. 
* <p> * <code> * trap-expr := trap expression * </code> * * @param allowActions Allow actions * @param isRhsExpr Whether this is a RHS expression or not * @return Trap expression node */ private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { STNode trapKeyword = parseTrapKeyword(); STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); if (isAction(expr)) { return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr); } return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr); } /** * Parse trap-keyword. * * @return Trap-keyword node */ private STNode parseTrapKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRAP_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRAP_KEYWORD); return parseTrapKeyword(); } } /** * Parse list constructor expression. * <p> * <code> * list-constructor-expr := [ [ expr-list ] ] * <br/> * expr-list := expression (, expression)* * </code> * * @return Parsed node */ private STNode parseListConstructorExpr() { startContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode expressions = parseOptionalExpressionsList(); STNode closeBracket = parseCloseBracket(); endContext(); return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); } /** * Parse optional expression list. 
     *
     * @return Parsed node
     */
    private STNode parseOptionalExpressionsList() {
        List<STNode> expressions = new ArrayList<>();
        // An empty list constructor has no expressions at all.
        if (isEndOfListConstructor(peek().kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first expression; it has no leading comma.
        STNode expr = parseExpression();
        expressions.add(expr);
        return parseOptionalExpressionsList(expressions);
    }

    private STNode parseOptionalExpressionsList(List<STNode> expressions) {
        // Parse the remaining members, each preceded by a comma separator.
        // Both separators and expressions go into the same alternating list.
        STNode listConstructorMemberEnd;
        while (!isEndOfListConstructor(peek().kind)) {
            listConstructorMemberEnd = parseListConstructorMemberEnd();
            if (listConstructorMemberEnd == null) {
                break;
            }
            expressions.add(listConstructorMemberEnd);
            STNode expr = parseExpression();
            expressions.add(expr);
        }
        return STNodeFactory.createNodeList(expressions);
    }

    private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseListConstructorMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                // null signals the caller that the member list has ended.
                return null;
            default:
                recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
                return parseListConstructorMemberEnd();
        }
    }

    /**
     * Parse foreach statement.
     * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code>
     *
     * @return foreach statement
     */
    private STNode parseForEachStatement() {
        startContext(ParserRuleContext.FOREACH_STMT);
        STNode forEachKeyword = parseForEachKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
        STNode inKeyword = parseInKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        STNode blockStatement = parseBlockNode();
        endContext();
        // An optional on-fail clause may follow the block statement.
        STNode onFailClause;
        if (peek().kind == SyntaxKind.ON_KEYWORD) {
            onFailClause = parseOnFailClause();
        } else {
            onFailClause = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,
                blockStatement, onFailClause);
    }

    /**
     * Parse foreach-keyword.
     *
     * @return ForEach-keyword node
     */
    private STNode parseForEachKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FOREACH_KEYWORD);
            return parseForEachKeyword();
        }
    }

    /**
     * Parse in-keyword.
     *
     * @return In-keyword node
     */
    private STNode parseInKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.IN_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.IN_KEYWORD);
            return parseInKeyword();
        }
    }

    /**
     * Parse type cast expression.
* <p> * <code> * type-cast-expr := < type-cast-param > expression * <br/> * type-cast-param := [annots] type-descriptor | annots * </code> * * @return Parsed node */ private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) { startContext(ParserRuleContext.TYPE_CAST); STNode ltToken = parseLTToken(); STNode typeCastParam = parseTypeCastParam(); STNode gtToken = parseGTToken(); endContext(); STNode expression = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr); return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression); } private STNode parseTypeCastParam() { STNode annot; STNode type; STToken token = peek(); switch (token.kind) { case AT_TOKEN: annot = parseOptionalAnnotations(); token = peek(); if (isTypeStartingToken(token.kind)) { type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } else { type = STNodeFactory.createEmptyNode(); } break; default: annot = STNodeFactory.createEmptyNode(); type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); break; } return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type); } /** * Parse table constructor expression. * <p> * <code> * table-constructor-expr-rhs := [ [row-list] ] * </code> * * @param tableKeyword tableKeyword that precedes this rhs * @param keySpecifier keySpecifier that precedes this rhs * @return Parsed node */ private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) { switchContext(ParserRuleContext.TABLE_CONSTRUCTOR); STNode openBracket = parseOpenBracket(); STNode rowList = parseRowList(); STNode closeBracket = parseCloseBracket(); return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList, closeBracket); } /** * Parse table-keyword. 
     *
     * @return Table-keyword node
     */
    private STNode parseTableKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TABLE_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TABLE_KEYWORD);
            return parseTableKeyword();
        }
    }

    /**
     * Parse table rows.
     * <p>
     * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
     *
     * @return Parsed node
     */
    private STNode parseRowList() {
        STToken nextToken = peek();
        // An empty table constructor has no rows.
        if (isEndOfTableRowList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        // Parse the first row; it has no leading comma.
        List<STNode> mappings = new ArrayList<>();
        STNode mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);

        // Parse the remaining rows, each preceded by a comma separator.
        nextToken = peek();
        STNode rowEnd;
        while (!isEndOfTableRowList(nextToken.kind)) {
            rowEnd = parseTableRowEnd();
            if (rowEnd == null) {
                break;
            }
            mappings.add(rowEnd);
            mapExpr = parseMappingConstructorExpr();
            mappings.add(mapExpr);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(mappings);
    }

    private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            case COMMA_TOKEN:
            case OPEN_BRACE_TOKEN:
                return false;
            default:
                return isEndOfMappingConstructor(tokenKind);
        }
    }

    private STNode parseTableRowEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
            case EOF_TOKEN:
                // null signals the caller that the row list has ended.
                return null;
            default:
                recover(peek(), ParserRuleContext.TABLE_ROW_END);
                return parseTableRowEnd();
        }
    }

    /**
     * Parse key specifier.
     * <p>
     * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
     *
     * @return Parsed node
     */
    private STNode parseKeySpecifier() {
        startContext(ParserRuleContext.KEY_SPECIFIER);
        STNode keyKeyword = parseKeyKeyword();
        STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
        STNode fieldNames = parseFieldNames();
        STNode closeParen = parseCloseParenthesis();
        endContext();
        return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
    }

    /**
     * Parse key-keyword.
* * @return Key-keyword node */ private STNode parseKeyKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.KEY_KEYWORD) { return consume(); } if (isKeyKeyword(token)) { return getKeyKeyword(consume()); } recover(token, ParserRuleContext.KEY_KEYWORD); return parseKeyKeyword(); } static boolean isKeyKeyword(STToken token) { return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text()); } private STNode getKeyKeyword(STToken token) { return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(), token.diagnostics()); } /** * Parse field names. * <p> * <code>field-name-list := [ field-name (, field-name)* ]</code> * * @return Parsed node */ private STNode parseFieldNames() { STToken nextToken = peek(); if (isEndOfFieldNamesList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } List<STNode> fieldNames = new ArrayList<>(); STNode fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); STNode leadingComma; while (!isEndOfFieldNamesList(nextToken.kind)) { leadingComma = parseComma(); fieldNames.add(leadingComma); fieldName = parseVariableName(); fieldNames.add(fieldName); nextToken = peek(); } return STNodeFactory.createNodeList(fieldNames); } private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case IDENTIFIER_TOKEN: return false; default: return true; } } /** * Parse error type descriptor. 
* <p> * error-type-descriptor := error [error-type-param] * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) > * detail-type-descriptor := type-descriptor * inferred-type-descriptor := * * </p> * * @return Parsed node */ private STNode parseErrorTypeDescriptor() { STNode errorKeywordToken = parseErrorKeyword(); return parseErrorTypeDescriptor(errorKeywordToken); } private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) { STNode errorTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { errorTypeParamsNode = parseErrorTypeParamsNode(); } else { errorTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode); } /** * Parse error type param node. * <p> * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) > * detail-type-descriptor := type-descriptor * inferred-type-descriptor := * * </p> * * @return Parsed node */ private STNode parseErrorTypeParamsNode() { STNode ltToken = parseLTToken(); STNode parameter; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { parameter = consume(); } else { parameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); } STNode gtToken = parseGTToken(); return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken); } /** * Parse error-keyword. * * @return Parsed error-keyword node */ private STNode parseErrorKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ERROR_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ERROR_KEYWORD); return parseErrorKeyword(); } } /** * Parse typedesc type descriptor. 
     * typedesc-type-descriptor := typedesc type-parameter
     *
     * @return Parsed typedesc type node
     */
    private STNode parseTypedescTypeDescriptor() {
        STNode typedescKeywordToken = parseTypedescKeyword();
        STNode typedescTypeParamsNode;
        STToken nextToken = peek();
        // The type parameter is optional: present only when `<` follows.
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            typedescTypeParamsNode = parseTypeParameter();
        } else {
            typedescTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);
    }

    /**
     * Parse typedesc-keyword.
     *
     * @return Parsed typedesc-keyword node
     */
    private STNode parseTypedescKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TYPEDESC_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TYPEDESC_KEYWORD);
            return parseTypedescKeyword();
        }
    }

    /**
     * Parse stream type descriptor.
     * <p>
     * stream-type-descriptor := stream [stream-type-parameters]
     * stream-type-parameters := < type-descriptor [, type-descriptor]>
     * </p>
     *
     * @return Parsed stream type descriptor node
     */
    private STNode parseStreamTypeDescriptor() {
        STNode streamKeywordToken = parseStreamKeyword();
        STNode streamTypeParamsNode;
        STToken nextToken = peek();
        // Stream type parameters are optional: present only when `<` follows.
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            streamTypeParamsNode = parseStreamTypeParamsNode();
        } else {
            streamTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
    }

    /**
     * Parse xml type descriptor.
* xml-type-descriptor := xml type-parameter * * @return Parsed typedesc type node */ private STNode parseXmlTypeDescriptor() { STNode xmlKeywordToken = parseXMLKeyword(); STNode typedescTypeParamsNode; STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.LT_TOKEN) { typedescTypeParamsNode = parseTypeParameter(); } else { typedescTypeParamsNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode); } /** * Parse stream type params node. * <p> * stream-type-parameters := < type-descriptor [, type-descriptor]> * </p> * * @return Parsed stream type params node */ private STNode parseStreamTypeParamsNode() { STNode ltToken = parseLTToken(); startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC); STNode leftTypeDescNode = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false); STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode); endContext(); return streamTypedesc; } private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) { STNode commaToken, rightTypeDescNode, gtToken; switch (peek().kind) { case COMMA_TOKEN: commaToken = parseComma(); rightTypeDescNode = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC, false); break; case GT_TOKEN: commaToken = STNodeFactory.createEmptyNode(); rightTypeDescNode = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode); return parseStreamTypeParamsNode(ltToken, leftTypeDescNode); } gtToken = parseGTToken(); return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode, gtToken); } /** * Parse stream-keyword. 
     *
     * @return Parsed stream-keyword node
     */
    private STNode parseStreamKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.STREAM_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.STREAM_KEYWORD);
            return parseStreamKeyword();
        }
    }

    /**
     * Parse let expression.
     * <p>
     * <code>
     * let-expr := let let-var-decl [, let-var-decl]* in expression
     * </code>
     *
     * @param isRhsExpr Whether this is parsed as a RHS expression
     * @return Parsed node
     */
    private STNode parseLetExpression(boolean isRhsExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
        STNode inKeyword = parseInKeyword();

        // At least one let-var-decl is required; attach a diagnostic to the
        // `let` keyword when the declaration list is empty.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
        // NOTE(review): the third flag appears to disallow actions in the
        // in-expression — confirm against parseExpression's signature.
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
    }

    /**
     * Parse let-keyword.
     *
     * @return Let-keyword node
     */
    private STNode parseLetKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LET_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LET_KEYWORD);
            return parseLetKeyword();
        }
    }

    /**
     * Parse let variable declarations.
* <p> * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code> * * @return Parsed node */ private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) { startContext(context); List<STNode> varDecls = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfLetVarDeclarations(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode varDec = parseLetVarDecl(isRhsExpr); varDecls.add(varDec); nextToken = peek(); STNode leadingComma; while (!isEndOfLetVarDeclarations(nextToken.kind)) { leadingComma = parseComma(); varDecls.add(leadingComma); varDec = parseLetVarDecl(isRhsExpr); varDecls.add(varDec); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(varDecls); } private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case AT_TOKEN: return false; case IN_KEYWORD: return true; default: return !isTypeStartingToken(tokenKind); } } /** * Parse let variable declaration. * <p> * <code>let-var-decl := [annots] typed-binding-pattern = expression</code> * * @return Parsed node */ private STNode parseLetVarDecl(boolean isRhsExpr) { STNode annot = parseOptionalAnnotations(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL); STNode assign = parseAssignOp(); STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false); return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression); } /** * Parse raw backtick string template expression. 
* <p> * <code>BacktickString := `expression`</code> * * @return Template expression node */ private STNode parseTemplateExpression() { STNode type = STNodeFactory.createEmptyNode(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } private STNode parseTemplateContent() { List<STNode> items = new ArrayList<>(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); items.add(contentItem); nextToken = peek(); } return STNodeFactory.createNodeList(items); } private boolean isEndOfBacktickContent(SyntaxKind kind) { switch (kind) { case EOF_TOKEN: case BACKTICK_TOKEN: return true; default: return false; } } private STNode parseTemplateItem() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return parseInterpolation(); } return consume(); } /** * Parse string template expression. * <p> * <code>string-template-expr := string ` expression `</code> * * @return String template expression node */ private STNode parseStringTemplateExpression() { STNode type = parseStringKeyword(); STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START); STNode content = parseTemplateContent(); STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END); return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick, content, endingBackTick); } /** * Parse <code>string</code> keyword. 
 *
 * @return string keyword node
 */
private STNode parseStringKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.STRING_KEYWORD) {
        return consume();
    } else {
        // Recover and retry — standard keyword-parser pattern in this file.
        recover(token, ParserRuleContext.STRING_KEYWORD);
        return parseStringKeyword();
    }
}

/**
 * Parse XML template expression.
 * <p>
 * <code>xml-template-expr := xml BacktickString</code>
 *
 * @return XML template expression
 */
private STNode parseXMLTemplateExpression() {
    STNode xmlKeyword = parseXMLKeyword();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    // Backtick content is re-lexed and re-parsed as XML, not kept as raw template items.
    STNode content = parseTemplateContentAsXML();
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
            startingBackTick, content, endingBackTick);
}

/**
 * Parse <code>xml</code> keyword.
 *
 * @return xml keyword node
 */
private STNode parseXMLKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.XML_KEYWORD) {
        return consume();
    } else {
        // Recover and retry — standard keyword-parser pattern in this file.
        recover(token, ParserRuleContext.XML_KEYWORD);
        return parseXMLKeyword();
    }
}

/**
 * Parse the content of the template string as XML. This method first read the
 * input in the same way as the raw-backtick-template (BacktickString). Then
 * it parses the content as XML.
* * @return XML node */ private STNode parseTemplateContentAsXML() { ArrayDeque<STNode> expressions = new ArrayDeque<>(); StringBuilder xmlStringBuilder = new StringBuilder(); STToken nextToken = peek(); while (!isEndOfBacktickContent(nextToken.kind)) { STNode contentItem = parseTemplateItem(); if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) { xmlStringBuilder.append(((STToken) contentItem).text()); } else { xmlStringBuilder.append("${}"); expressions.add(contentItem); } nextToken = peek(); } CharReader charReader = CharReader.from(xmlStringBuilder.toString()); AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader)); XMLParser xmlParser = new XMLParser(tokenReader, expressions); return xmlParser.parse(); } /** * Parse interpolation of a back-tick string. * <p> * <code> * interpolation := ${ expression } * </code> * * @return Interpolation node */ private STNode parseInterpolation() { startContext(ParserRuleContext.INTERPOLATION); STNode interpolStart = parseInterpolationStart(); STNode expr = parseExpression(); while (true) { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.EOF_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) { break; } else { nextToken = consume(); expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken, DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text()); } } STNode closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace); } /** * Parse interpolation start token. * <p> * <code>interpolation-start := ${</code> * * @return Interpolation start token */ private STNode parseInterpolationStart() { STToken token = peek(); if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) { return consume(); } else { recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN); return parseInterpolationStart(); } } /** * Parse back-tick token. 
* * @return Back-tick token */ private STNode parseBacktickToken(ParserRuleContext ctx) { STToken token = peek(); if (token.kind == SyntaxKind.BACKTICK_TOKEN) { return consume(); } else { recover(token, ctx); return parseBacktickToken(ctx); } } /** * Parse table type descriptor. * <p> * table-type-descriptor := table row-type-parameter [key-constraint] * row-type-parameter := type-parameter * key-constraint := key-specifier | key-type-constraint * key-specifier := key ( [ field-name (, field-name)* ] ) * key-type-constraint := key type-parameter * </p> * * @return Parsed table type desc node. */ private STNode parseTableTypeDescriptor() { STNode tableKeywordToken = parseTableKeyword(); STNode rowTypeParameterNode = parseRowTypeParameter(); STNode keyConstraintNode; STToken nextToken = peek(); if (isKeyKeyword(nextToken)) { STNode keyKeywordToken = getKeyKeyword(consume()); keyConstraintNode = parseKeyConstraint(keyKeywordToken); } else { keyConstraintNode = STNodeFactory.createEmptyNode(); } return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode); } /** * Parse row type parameter node. * <p> * row-type-parameter := type-parameter * </p> * * @return Parsed node. */ private STNode parseRowTypeParameter() { startContext(ParserRuleContext.ROW_TYPE_PARAM); STNode rowTypeParameterNode = parseTypeParameter(); endContext(); return rowTypeParameterNode; } /** * Parse type parameter node. * <p> * type-parameter := < type-descriptor > * </p> * * @return Parsed node */ private STNode parseTypeParameter() { STNode ltToken = parseLTToken(); STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS); STNode gtToken = parseGTToken(); return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken); } /** * Parse key constraint. * <p> * key-constraint := key-specifier | key-type-constraint * </p> * * @return Parsed node. 
*/ private STNode parseKeyConstraint(STNode keyKeywordToken) { switch (peek().kind) { case OPEN_PAREN_TOKEN: return parseKeySpecifier(keyKeywordToken); case LT_TOKEN: return parseKeyTypeConstraint(keyKeywordToken); default: recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken); return parseKeyConstraint(keyKeywordToken); } } /** * Parse key specifier given parsed key keyword token. * <p> * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code> * * @return Parsed node */ private STNode parseKeySpecifier(STNode keyKeywordToken) { startContext(ParserRuleContext.KEY_SPECIFIER); STNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode fieldNamesNode = parseFieldNames(); STNode closeParenToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken); } /** * Parse key type constraint. * <p> * key-type-constraint := key type-parameter * </p> * * @return Parsed node */ private STNode parseKeyTypeConstraint(STNode keyKeywordToken) { STNode typeParameterNode = parseTypeParameter(); return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode); } /** * Parse function type descriptor. * <p> * <code>function-type-descriptor := function function-signature</code> * * @return Function type descriptor node */ private STNode parseFunctionTypeDesc() { startContext(ParserRuleContext.FUNC_TYPE_DESC); STNode functionKeyword = parseFunctionKeyword(); STNode signature = parseFuncSignature(true); endContext(); return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature); } /** * Parse explicit anonymous function expression. * <p> * <code>explicit-anonymous-function-expr := [annots] function function-signature anon-func-body</code> * * @param annots Annotations. 
* @param isRhsExpr Is expression in rhs context * @return Anonymous function expression node */ private STNode parseExplicitFunctionExpression(STNode annots, boolean isRhsExpr) { startContext(ParserRuleContext.ANON_FUNC_EXPRESSION); STNode funcKeyword = parseFunctionKeyword(); STNode funcSignature = parseFuncSignature(false); STNode funcBody = parseAnonFuncBody(isRhsExpr); return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature, funcBody); } /** * Parse anonymous function body. * <p> * <code>anon-func-body := block-function-body | expr-function-body</code> * * @param isRhsExpr Is expression in rhs context * @return Anon function body node */ private STNode parseAnonFuncBody(boolean isRhsExpr) { switch (peek().kind) { case OPEN_BRACE_TOKEN: case EOF_TOKEN: STNode body = parseFunctionBodyBlock(true); endContext(); return body; case RIGHT_DOUBLE_ARROW_TOKEN: endContext(); return parseExpressionFuncBody(true, isRhsExpr); default: recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr); return parseAnonFuncBody(isRhsExpr); } } /** * Parse expression function body. * <p> * <code>expr-function-body := => expression</code> * * @param isAnon Is anonymous function. * @param isRhsExpr Is expression in rhs context * @return Expression function body node */ private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) { STNode rightDoubleArrow = parseDoubleRightArrow(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode semiColon; if (isAnon) { semiColon = STNodeFactory.createEmptyNode(); } else { semiColon = parseSemicolon(); } return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon); } /** * Parse '=>' token. 
 *
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken token = peek();
    if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return consume();
    } else {
        // Recover and retry — standard token-parser pattern in this file.
        recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
}

/**
 * Parse the remainder of an implicit anonymous function given its already-parsed
 * parameter node: <code>params => expression</code>. Normalizes the param node:
 * a braced expression becomes a single-param infer list; anything else is replaced
 * by a synthetic identifier carrying an invalid-param-list diagnostic.
 *
 * @param params    Already-parsed parameter node
 * @param isRhsExpr Is expression in rhs context
 * @return Implicit anonymous function expression node
 */
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            // Already in an acceptable shape — use as-is.
            break;
        case BRACED_EXPRESSION:
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        default:
            // Invalid param list: keep the bad node as leading invalid minutiae on a
            // synthetic identifier so the diagnostic points at the original tokens.
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }

    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}

/**
 * Create a new anon-func-param node from a braced expression.
 *
 * @param params Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode params) {
    List<STNode> paramList = new ArrayList<>();
    paramList.add(params.expression);
    // Reuse the braces of the original braced expression as the param-list parens.
    return STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,
            STNodeFactory.createNodeList(paramList), params.closeParen);
}

/**
 * Parse implicit anon function expression.
* * @param openParen Open parenthesis token * @param firstParam First parameter * @param isRhsExpr Is expression in rhs context * @return Implicit anon function expression node */ private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) { List<STNode> paramList = new ArrayList<>(); paramList.add(firstParam); STToken nextToken = peek(); STNode paramEnd; STNode param; while (!isEndOfAnonFuncParametersList(nextToken.kind)) { paramEnd = parseImplicitAnonFuncParamEnd(); if (paramEnd == null) { break; } paramList.add(paramEnd); param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM); param = STNodeFactory.createSimpleNameReferenceNode(param); paramList.add(param); nextToken = peek(); } STNode params = STNodeFactory.createNodeList(paramList); STNode closeParen = parseCloseParenthesis(); endContext(); STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen); return parseImplicitAnonFunc(inferedParams, isRhsExpr); } private STNode parseImplicitAnonFuncParamEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: return null; default: recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS); return parseImplicitAnonFuncParamEnd(); } } private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) { switch (tokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case SEMICOLON_TOKEN: case RETURNS_KEYWORD: case TYPE_KEYWORD: case LISTENER_KEYWORD: case IF_KEYWORD: case WHILE_KEYWORD: case DO_KEYWORD: case OPEN_BRACE_TOKEN: case RIGHT_DOUBLE_ARROW_TOKEN: return true; default: return false; } } /** * Parse tuple type descriptor. 
* <p> * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ] * <br/><br/> * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor] * | [ tuple-rest-descriptor ] * <br/><br/> * tuple-rest-descriptor := type-descriptor ... * </code> * * @return */ private STNode parseTupleTypeDesc() { STNode openBracket = parseOpenBracket(); startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE); STNode memberTypeDesc = parseTupleMemberTypeDescList(); STNode closeBracket = parseCloseBracket(); endContext(); openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket, DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC); return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket); } /** * Parse tuple member type descriptors. * * @return Parsed node */ private STNode parseTupleMemberTypeDescList() { List<STNode> typeDescList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfTypeList(nextToken.kind)) { return STNodeFactory.createEmptyNodeList(); } STNode typeDesc = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_TUPLE, false); return parseTupleTypeMembers(typeDesc, typeDescList); } private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) { STToken nextToken; nextToken = peek(); STNode tupleMemberRhs; while (!isEndOfTypeList(nextToken.kind)) { tupleMemberRhs = parseTupleMemberRhs(); if (tupleMemberRhs == null) { break; } if (tupleMemberRhs.kind == SyntaxKind.ELLIPSIS_TOKEN) { typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs); break; } typeDescList.add(typeDesc); typeDescList.add(tupleMemberRhs); typeDesc = parseTypeDescriptorWithoutContext(ParserRuleContext.TYPE_DESC_IN_TUPLE, false); nextToken = peek(); } typeDescList.add(typeDesc); return STNodeFactory.createNodeList(typeDescList); } private STNode parseTupleMemberRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: 
return null; case ELLIPSIS_TOKEN: return parseEllipsis(); default: recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS); return parseTupleMemberRhs(); } } private boolean isEndOfTypeList(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case CLOSE_BRACKET_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case EOF_TOKEN: case EQUAL_TOKEN: case SEMICOLON_TOKEN: return true; default: return false; } } /** * Parse table constructor or query expression. * <p> * <code> * table-constructor-or-query-expr := table-constructor-expr | query-expr * <br/> * table-constructor-expr := table [key-specifier] [ [row-list] ] * <br/> * query-expr := [query-construct-type] query-pipeline select-clause * [query-construct-type] query-pipeline select-clause on-conflict-clause? limit-clause? * <br/> * query-construct-type := table key-specifier | stream * </code> * * @return Parsed node */ private STNode parseTableConstructorOrQuery(boolean isRhsExpr) { startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION); STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr); endContext(); return tableOrQueryExpr; } private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) { STNode queryConstructType; switch (peek().kind) { case FROM_KEYWORD: queryConstructType = STNodeFactory.createEmptyNode(); return parseQueryExprRhs(queryConstructType, isRhsExpr); case STREAM_KEYWORD: queryConstructType = parseQueryConstructType(parseStreamKeyword(), null); return parseQueryExprRhs(queryConstructType, isRhsExpr); case TABLE_KEYWORD: STNode tableKeyword = parseTableKeyword(); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr); return parseTableConstructorOrQueryInternal(isRhsExpr); } } private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) { STNode keySpecifier; STToken nextToken = peek(); switch (nextToken.kind) { case 
OPEN_BRACKET_TOKEN: keySpecifier = STNodeFactory.createEmptyNode(); return parseTableConstructorExprRhs(tableKeyword, keySpecifier); case KEY_KEYWORD: keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); case IDENTIFIER_TOKEN: if (isKeyKeyword(nextToken)) { keySpecifier = parseKeySpecifier(); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); } break; default: break; } recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr); return parseTableConstructorOrQuery(tableKeyword, isRhsExpr); } private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr); case OPEN_BRACKET_TOKEN: return parseTableConstructorExprRhs(tableKeyword, keySpecifier); default: recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier, isRhsExpr); return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr); } } /** * Parse query construct type. * <p> * <code>query-construct-type := table key-specifier | stream</code> * * @return Parsed node */ private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) { return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier); } /** * Parse query expression. * <p> * <code> * query-expr-rhs := query-pipeline select-clause * query-pipeline select-clause on-conflict-clause? limit-clause? 
* <br/> * query-pipeline := from-clause intermediate-clause* * </code> * * @param queryConstructType queryConstructType that precedes this rhs * @return Parsed node */ private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) { switchContext(ParserRuleContext.QUERY_EXPRESSION); STNode fromClause = parseFromClause(isRhsExpr); List<STNode> clauses = new ArrayList<>(); STNode intermediateClause; STNode selectClause = null; while (!isEndOfIntermediateClause(peek().kind)) { intermediateClause = parseIntermediateClause(isRhsExpr); if (intermediateClause == null) { break; } if (selectClause != null) { selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause, DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE); continue; } if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) { selectClause = intermediateClause; } else { clauses.add(intermediateClause); } } if (peek().kind == SyntaxKind.DO_KEYWORD) { STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); return parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr); } if (selectClause == null) { STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD); STNode expr = STNodeFactory .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN)); selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr); if (clauses.isEmpty()) { fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); } else { int lastIndex = clauses.size() - 1; STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex), DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE); clauses.set(lastIndex, intClauseWithDiagnostic); } } STNode intermediateClauses = STNodeFactory.createNodeList(clauses); STNode queryPipeline = 
STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses); STNode onConflictClause = parseOnConflictClause(isRhsExpr); STNode limitClause = parseLimitClause(isRhsExpr); return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause, onConflictClause, limitClause); } /** * Parse an intermediate clause. * <p> * <code> * intermediate-clause := from-clause | where-clause | let-clause | join-clause | order-by-clause * </code> * * @return Parsed node */ private STNode parseIntermediateClause(boolean isRhsExpr) { switch (peek().kind) { case FROM_KEYWORD: return parseFromClause(isRhsExpr); case WHERE_KEYWORD: return parseWhereClause(isRhsExpr); case LET_KEYWORD: return parseLetClause(isRhsExpr); case SELECT_KEYWORD: return parseSelectClause(isRhsExpr); case JOIN_KEYWORD: case OUTER_KEYWORD: case EQUALS_KEYWORD: return parseJoinClause(isRhsExpr); case ORDER_KEYWORD: case BY_KEYWORD: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return parseOrderByClause(isRhsExpr); case DO_KEYWORD: case SEMICOLON_TOKEN: case ON_KEYWORD: case CONFLICT_KEYWORD: case LIMIT_KEYWORD: return null; default: recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr); return parseIntermediateClause(isRhsExpr); } } /** * Parse join-keyword. * * @return Join-keyword node */ private STNode parseJoinKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.JOIN_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.JOIN_KEYWORD); return parseJoinKeyword(); } } /** * Parse equals keyword. 
* * @return Parsed node */ private STNode parseEqualsKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.EQUALS_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.EQUALS_KEYWORD); return parseEqualsKeyword(); } } private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) { switch (tokenKind) { case CLOSE_BRACE_TOKEN: case CLOSE_PAREN_TOKEN: case CLOSE_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case SEMICOLON_TOKEN: case PUBLIC_KEYWORD: case FUNCTION_KEYWORD: case EOF_TOKEN: case RESOURCE_KEYWORD: case LISTENER_KEYWORD: case DOCUMENTATION_STRING: case PRIVATE_KEYWORD: case RETURNS_KEYWORD: case SERVICE_KEYWORD: case TYPE_KEYWORD: case CONST_KEYWORD: case FINAL_KEYWORD: case DO_KEYWORD: return true; default: return isValidExprRhsStart(tokenKind, SyntaxKind.NONE); } } /** * Parse from clause. * <p> * <code>from-clause := from typed-binding-pattern in expression</code> * * @return Parsed node */ private STNode parseFromClause(boolean isRhsExpr) { STNode fromKeyword = parseFromKeyword(); STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE); STNode inKeyword = parseInKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression); } /** * Parse from-keyword. * * @return From-keyword node */ private STNode parseFromKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.FROM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.FROM_KEYWORD); return parseFromKeyword(); } } /** * Parse where clause. * <p> * <code>where-clause := where expression</code> * * @return Parsed node */ private STNode parseWhereClause(boolean isRhsExpr) { STNode whereKeyword = parseWhereKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); return STNodeFactory.createWhereClauseNode(whereKeyword, expression); } /** * Parse where-keyword. 
* * @return Where-keyword node */ private STNode parseWhereKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.WHERE_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.WHERE_KEYWORD); return parseWhereKeyword(); } } /** * Parse let clause. * <p> * <code>let-clause := let let-var-decl [, let-var-decl]* </code> * * @return Parsed node */ private STNode parseLetClause(boolean isRhsExpr) { STNode letKeyword = parseLetKeyword(); STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr); letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword, DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION); return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations); } /** * Parse order-keyword. * * @return Order-keyword node */ private STNode parseOrderKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ORDER_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ORDER_KEYWORD); return parseOrderKeyword(); } } /** * Parse by-keyword. * * @return By-keyword node */ private STNode parseByKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.BY_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.BY_KEYWORD); return parseByKeyword(); } } /** * Parse order by clause. * <p> * <code>order-by-clause := order by order-key-list * </code> * * @return Parsed node */ private STNode parseOrderByClause(boolean isRhsExpr) { STNode orderKeyword = parseOrderKeyword(); STNode byKeyword = parseByKeyword(); STNode orderKeys = parseOrderKeyList(isRhsExpr); byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY); return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys); } /** * Parse order key. 
* <p> * <code>order-key-list := order-key [, order-key]*</code> * * @return Parsed node */ private STNode parseOrderKeyList(boolean isRhsExpr) { startContext(ParserRuleContext.ORDER_KEY_LIST); List<STNode> orderKeys = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfOrderKeys(nextToken.kind)) { endContext(); return STNodeFactory.createEmptyNodeList(); } STNode orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); STNode orderKeyListMemberEnd; while (!isEndOfOrderKeys(nextToken.kind)) { orderKeyListMemberEnd = parseOrderKeyListMemberEnd(); if (orderKeyListMemberEnd == null) { break; } orderKeys.add(orderKeyListMemberEnd); orderKey = parseOrderKey(isRhsExpr); orderKeys.add(orderKey); nextToken = peek(); } endContext(); return STNodeFactory.createNodeList(orderKeys); } private boolean isEndOfOrderKeys(SyntaxKind tokenKind) { switch (tokenKind) { case COMMA_TOKEN: case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: return false; case SEMICOLON_TOKEN: case EOF_TOKEN: return true; default: return isQueryClauseStartToken(tokenKind); } } private boolean isQueryClauseStartToken(SyntaxKind tokenKind) { switch (tokenKind) { case SELECT_KEYWORD: case LET_KEYWORD: case WHERE_KEYWORD: case OUTER_KEYWORD: case JOIN_KEYWORD: case ORDER_KEYWORD: case DO_KEYWORD: case FROM_KEYWORD: case LIMIT_KEYWORD: return true; default: return false; } } private STNode parseOrderKeyListMemberEnd() { STToken nextToken = peek(); switch (nextToken.kind) { case COMMA_TOKEN: return parseComma(); case EOF_TOKEN: return null; default: if (isQueryClauseStartToken(nextToken.kind)) { return null; } recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END); return parseOrderKeyListMemberEnd(); } } /** * Parse order key. 
* <p> * <code>order-key := expression (ascending | descending)?</code> * * @return Parsed node */ private STNode parseOrderKey(boolean isRhsExpr) { STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); STNode orderDirection; STToken nextToken = peek(); switch (nextToken.kind) { case ASCENDING_KEYWORD: case DESCENDING_KEYWORD: orderDirection = consume(); break; default: orderDirection = STNodeFactory.createEmptyNode(); } return STNodeFactory.createOrderKeyNode(expression, orderDirection); } /** * Parse select clause. * <p> * <code>select-clause := select expression</code> * * @return Parsed node */ private STNode parseSelectClause(boolean isRhsExpr) { startContext(ParserRuleContext.SELECT_CLAUSE); STNode selectKeyword = parseSelectKeyword(); STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createSelectClauseNode(selectKeyword, expression); } /** * Parse select-keyword. * * @return Select-keyword node */ private STNode parseSelectKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.SELECT_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.SELECT_KEYWORD); return parseSelectKeyword(); } } /** * Parse on-conflict clause. * <p> * <code> * onConflictClause := on conflict expression * </code> * * @return On conflict clause node */ private STNode parseOnConflictClause(boolean isRhsExpr) { startContext(ParserRuleContext.ON_CONFLICT_CLAUSE); STToken nextToken = peek(); if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) { return STNodeFactory.createEmptyNode(); } STNode onKeyword = parseOnKeyword(); STNode conflictKeyword = parseConflictKeyword(); STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false); endContext(); return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr); } /** * Parse conflict keyword. 
 *
 * @return Conflict keyword node
 */
private STNode parseConflictKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {
        return consume();
    } else {
        // Recover and retry — standard keyword-parser pattern in this file.
        recover(token, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
}

/**
 * Parse limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @return Limit expression node, or an empty node when no limit clause is present
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {
        // Clause is optional — no recovery attempted here.
        return STNodeFactory.createEmptyNode();
    }

    STNode limitKeyword = consume();
    STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, expr);
}

/**
 * Parse join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause?
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @return Join clause
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    STNode outerKeyword;
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {
        outerKeyword = consume();
    } else {
        // "outer" is optional.
        outerKeyword = STNodeFactory.createEmptyNode();
    }

    STNode joinKeyword = parseJoinKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    // NOTE(review): the on-clause is parsed after leaving JOIN_CLAUSE context — presumably
    // intentional so recovery inside the on-clause does not use join-clause rules; confirm.
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword,
            expression, onCondition);
}

/**
 * Parse on clause.
* <p>
     * <code>on-clause := on expression equals expression</code>
     *
     * @return On clause node
     */
    private STNode parseOnClause(boolean isRhsExpr) {
        STNode onKeyword = parseOnKeyword();
        // Both operands are parsed at EQUALITY precedence, with `equals` between them.
        STNode lhsExpression = parseExpression(OperatorPrecedence.EQUALITY, isRhsExpr, false);
        STNode equalsKeyword = parseEqualsKeyword();
        STNode rhsExpression = parseExpression(OperatorPrecedence.EQUALITY, isRhsExpr, false);
        return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
    }

    /**
     * Parse start action.
     * <p>
     * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
     *
     * @return Start action node
     */
    private STNode parseStartAction(STNode annots) {
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();

        switch (expr.kind) {
            case FUNCTION_CALL:
            case METHOD_CALL:
            case REMOTE_METHOD_CALL_ACTION:
                break;
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                // A bare name after `start` is treated as a call with missing parentheses.
                STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
                STNode arguments = STNodeFactory.createEmptyNodeList();
                STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments,
                        closeParenToken);
                break;
            default:
                // Any other expression is invalid here: attach it to the keyword as invalid
                // minutiae and substitute a fully-missing function call.
                startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                        DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
                STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
                openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
                arguments = STNodeFactory.createEmptyNodeList();
                closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                        closeParenToken);
                break;
        }

        return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
    }

    /**
     * Parse start keyword.
     *
     * @return Start keyword node
     */
    private STNode parseStartKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.START_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.START_KEYWORD);
            return parseStartKeyword();
        }
    }

    /**
     * Parse flush action.
     * <p>
     * <code>flush-action := flush [peer-worker]</code>
     *
     * @return flush action node
     */
    private STNode parseFlushAction() {
        STNode flushKeyword = parseFlushKeyword();
        STNode peerWorker = parseOptionalPeerWorkerName();
        return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
    }

    /**
     * Parse flush keyword.
     *
     * @return flush keyword node
     */
    private STNode parseFlushKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.FLUSH_KEYWORD);
            return parseFlushKeyword();
        }
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | default</code>
     *
     * @return peer worker name node, or an empty node when absent
     */
    private STNode parseOptionalPeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case DEFAULT_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // Peer worker is optional here, so no recovery is attempted.
                return STNodeFactory.createEmptyNode();
        }
    }

    /**
     * Parse intersection type descriptor.
* <p>
     * intersection-type-descriptor := type-descriptor & type-descriptor
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                                   boolean isTypedBindingPattern) {
        // Caller has already seen the `&` token; consume it here.
        STNode bitwiseAndToken = consume();
        STNode rightTypeDesc = parseTypeDescriptor(context, isTypedBindingPattern, false);
        return createIntersectionTypeDesc(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
        // `var` is not a valid operand of an intersection type; validation may attach diagnostics.
        leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
        rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
        return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
    }

    /**
     * Parse singleton type descriptor.
     * <p>
     * singleton-type-descriptor := simple-const-expr
     * simple-const-expr :=
     * nil-literal
     * | boolean-literal
     * | [Sign] int-literal
     * | [Sign] floating-point-literal
     * | string-literal
     * | constant-reference-expr
     * </p>
     */
    private STNode parseSingletonTypeDesc() {
        STNode simpleContExpr = parseSimpleConstExpr();
        return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
    }

    /**
     * Parse a sign (+/-) followed by an int or floating-point literal as a unary expression.
     */
    private STNode parseSignedIntOrFloat() {
        STNode operator = parseUnaryOperator();
        STNode literal;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                literal = parseBasicLiteral();
                break;
            default: // decimal integer literal
                literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
                literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
        }
        return STNodeFactory.createUnaryExpressionNode(operator, literal);
    }

    /**
     * Check whether the upcoming tokens can start a singleton type descriptor.
     */
    private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
        STToken nextNextToken = getNextNextToken(tokenKind);
        switch (tokenKind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
                    return true;
                }
                return false;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // A sign only starts a singleton type if followed by a numeric literal.
                return isIntOrFloat(nextNextToken);
            default:
                return false;
        }
    }

    static boolean isIntOrFloat(STToken token) {
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case QUESTION_MARK_TOKEN:
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return true;
            default:
                return false;
        }
    }

    /**
     * Check whether the parser reached to a valid expression start.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return <code>true</code> if this is a start of a valid expression.
<code>false</code> otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        nextTokenIndex++;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                // A literal starts an expression only if what follows can legally end
                // or continue one.
                SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
                return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                        nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                        isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case IDENTIFIER_TOKEN:
                return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case FROM_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // Unary sign: look past it recursively.
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            case FUNCTION_KEYWORD:
            case TABLE_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
            case STREAM_KEYWORD:
                STToken nextNextToken = peek(nextTokenIndex);
                return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                        nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                        nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
            case ERROR_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
            case SERVICE_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_BRACE_TOKEN;
            case XML_KEYWORD:
            case STRING_KEYWORD:
                return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
                // These start actions, not expressions; fall through to false.
            default:
                return false;
        }
    }

    /**
     * Parse sync send action.
* <p>
     * <code>sync-send-action := expression ->> peer-worker</code>
     *
     * @param expression LHS expression of the sync send action
     * @return Sync send action node
     */
    private STNode parseSyncSendAction(STNode expression) {
        STNode syncSendToken = parseSyncSendToken();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
    }

    /**
     * Parse peer worker.
     * <p>
     * <code>peer-worker := worker-name | default</code>
     *
     * @return peer worker name node
     */
    private STNode parsePeerWorkerName() {
        STToken token = peek();
        switch (token.kind) {
            case IDENTIFIER_TOKEN:
            case DEFAULT_KEYWORD:
                return STNodeFactory.createSimpleNameReferenceNode(consume());
            default:
                // Unlike parseOptionalPeerWorkerName, the worker name is mandatory here.
                recover(token, ParserRuleContext.PEER_WORKER_NAME);
                return parsePeerWorkerName();
        }
    }

    /**
     * Parse sync send token.
     * <p>
     * <code>sync-send-token := ->> </code>
     *
     * @return sync send token
     */
    private STNode parseSyncSendToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
            return parseSyncSendToken();
        }
    }

    /**
     * Parse receive action.
     * <p>
     * <code>receive-action := single-receive-action | multiple-receive-action</code>
     *
     * @return Receive action
     */
    private STNode parseReceiveAction() {
        STNode leftArrow = parseLeftArrowToken();
        STNode receiveWorkers = parseReceiveWorkers();
        return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
    }

    private STNode parseReceiveWorkers() {
        switch (peek().kind) {
            case DEFAULT_KEYWORD:
            case IDENTIFIER_TOKEN:
                // Single receive: just a worker name.
                return parsePeerWorkerName();
            case OPEN_BRACE_TOKEN:
                // Multiple receive: `{ receive-field (, receive-field)* }`.
                return parseMultipleReceiveWorkers();
            default:
                recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
                return parseReceiveWorkers();
        }
    }

    /**
     * Parse multiple worker receivers.
* <p>
     * <code>{ receive-field (, receive-field)* }</code>
     *
     * @return Multiple worker receiver node
     */
    private STNode parseMultipleReceiveWorkers() {
        startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
        STNode openBrace = parseOpenBrace();
        STNode receiveFields = parseReceiveFields();
        STNode closeBrace = parseCloseBrace();
        endContext();
        // An empty field list is an error; attach the diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
        return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
    }

    /**
     * Parse the comma-separated receive-field list, including separator tokens.
     */
    private STNode parseReceiveFields() {
        List<STNode> receiveFields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfReceiveFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        STNode receiveField = parseReceiveField();
        receiveFields.add(receiveField);

        nextToken = peek();
        STNode recieveFieldEnd;
        while (!isEndOfReceiveFields(nextToken.kind)) {
            recieveFieldEnd = parseReceiveFieldEnd();
            if (recieveFieldEnd == null) {
                break;
            }

            receiveFields.add(recieveFieldEnd);
            receiveField = parseReceiveField();
            receiveFields.add(receiveField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(receiveFields);
    }

    private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after a receive field; returns null at the end of the list.
     */
    private STNode parseReceiveFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
                return parseReceiveFieldEnd();
        }
    }

    /**
     * Parse receive field.
* <p>
     * <code>receive-field := peer-worker | field-name : peer-worker</code>
     *
     * @return Receiver field node
     */
    private STNode parseReceiveField() {
        switch (peek().kind) {
            case DEFAULT_KEYWORD:
                STNode defaultKeyword = parseDefaultKeyword();
                return STNodeFactory.createSimpleNameReferenceNode(defaultKeyword);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
                // May become `field-name : peer-worker` if a colon follows.
                return createQualifiedReceiveField(identifier);
            default:
                recover(peek(), ParserRuleContext.RECEIVE_FIELD);
                return parseReceiveField();
        }
    }

    private STNode createQualifiedReceiveField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);
    }

    /**
     * Parse left arrow (<-) token.
     *
     * @return left arrow token
     */
    private STNode parseLeftArrowToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
            return parseLeftArrowToken();
        }
    }

    /**
     * Parse signed right shift token (>>).
     * <p>
     * The lexer emits two separate GT tokens; they are merged into one DOUBLE_GT token,
     * which is invalid if whitespace separated them.
     *
     * @return Parsed node
     */
    private STNode parseSignedRightShiftToken() {
        STNode openGTToken = consume();
        STToken endLGToken = consume();
        STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),
                endLGToken.trailingMinutiae());

        if (hasTrailingMinutiae(openGTToken)) {
            doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                    DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
        }
        return doubleGTToken;
    }

    /**
     * Parse unsigned right shift token (>>>).
*
     * @return Parsed node
     */
    private STNode parseUnsignedRightShiftToken() {
        // Merge three consecutive GT tokens into one TRIPPLE_GT token.
        STNode openGTToken = consume();
        STNode middleGTToken = consume();
        STNode endLGToken = consume();
        STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
                openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());

        // Whitespace between any of the GT tokens makes the operator invalid.
        boolean validOpenGTToken = !hasTrailingMinutiae(openGTToken);
        boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
        if (validOpenGTToken && validMiddleGTToken) {
            return unsignedRightShiftToken;
        }

        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
        return unsignedRightShiftToken;
    }

    /**
     * Parse wait action.
     * <p>
     * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
     *
     * @return Wait action node
     */
    private STNode parseWaitAction() {
        STNode waitKeyword = parseWaitKeyword();
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return parseMultiWaitAction(waitKeyword);
        }

        return parseSingleOrAlternateWaitAction(waitKeyword);
    }

    /**
     * Parse wait keyword.
     *
     * @return wait keyword
     */
    private STNode parseWaitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.WAIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.WAIT_KEYWORD);
            return parseWaitKeyword();
        }
    }

    /**
     * Parse single or alternate wait actions.
* <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();

        // Return an empty list if no wait-future-expr is present.
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }

        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);

        nextToken = peek();
        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }

            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }

        endContext();
        // NOTE(review): only the first wait-future-expr is attached to the tree; any
        // pipe-separated alternates are consumed but dropped — confirm this is intended.
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }

    private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case OPEN_BRACE_TOKEN:
                return true;
            case PIPE_TOKEN:
                // Pipe separates alternate wait exprs; it does not end the list.
            default:
                return false;
        }
    }

    private STNode parseWaitFutureExpr() {
        STNode waitFutureExpr = parseActionOrExpression();
        if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
            // Mapping constructors are explicitly excluded by the grammar.
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
        } else if (isAction(waitFutureExpr)) {
            waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
                    DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
        }
        return waitFutureExpr;
    }

    /**
     * Parse the separator after a wait-future-expr; returns null at the end of the list.
     */
    private STNode parseWaitFutureExprEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            default:
                if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
                    return null;
                }

                recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
                return parseWaitFutureExprEnd();
        }
    }

    /**
     * Parse multiple wait action.
     * <p>
     * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
     *
     * @param waitKeyword Wait keyword
     * @return Multiple wait action node
     */
    private STNode parseMultiWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
        STNode openBrace = parseOpenBrace();
        STNode waitFields = parseWaitFields();
        STNode closeBrace = parseCloseBrace();
        endContext();

        // An empty field list is an error; attach the diagnostic to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
        STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
    }

    private STNode parseWaitFields() {
        List<STNode> waitFields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfWaitFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }

        STNode waitField = parseWaitField();
        waitFields.add(waitField);

        nextToken = peek();
        STNode waitFieldEnd;
        while (!isEndOfWaitFields(nextToken.kind)) {
            waitFieldEnd = parseWaitFieldEnd();
            if (waitFieldEnd == null) {
                break;
            }

            waitFields.add(waitFieldEnd);
            waitField = parseWaitField();
            waitFields.add(waitField);
            nextToken = peek();
        }

        return STNodeFactory.createNodeList(waitFields);
    }

    private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    private STNode parseWaitFieldEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACE_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_END);
                return parseWaitFieldEnd();
        }
    }

    /**
     * Parse wait field.
     * <p>
     * <code>wait-field := variable-name | field-name : wait-future-expr</code>
     *
     * @return Receiver field node
     */
    private STNode parseWaitField() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
                identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
                return createQualifiedWaitField(identifier);
            default:
                recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
                return parseWaitField();
        }
    }

    private STNode createQualifiedWaitField(STNode identifier) {
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            return identifier;
        }

        STNode colon = parseColon();
        STNode waitFutureExpr = parseWaitFutureExpr();
        return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
    }

    /**
     * Parse annot access expression.
     * <p>
     * <code>
     * annot-access-expr := expression .@ annot-tag-reference
     * <br/>
     * annot-tag-reference := qualified-identifier | identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the annot access access
     * @return Parsed node
     */
    private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode annotAccessToken = parseAnnotChainingToken();
        STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
    }

    /**
     * Parse annot-chaining-token.
     *
     * @return Parsed node
     */
    private STNode parseAnnotChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
            return parseAnnotChainingToken();
        }
    }

    /**
     * Parse field access identifier.
* <p>
     * <code>field-access-identifier := qualified-identifier | identifier</code>
     *
     * @return Parsed node
     */
    private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }

    /**
     * Parse query action.
     * <p>
     * <code>query-action := query-pipeline do-clause limit-clause?
     * <br/>
     * do-clause := do block-stmt
     * </code>
     *
     * @param queryConstructType Query construct type. This is only for validation
     * @param queryPipeline Query pipeline
     * @param selectClause Select clause if any This is only for validation.
     * @return Query action node
     */
    private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                    boolean isRhsExpr) {
        if (queryConstructType != null) {
            // A construct type is not allowed in a query action; keep it as invalid minutiae.
            queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                    DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
        }
        if (selectClause != null) {
            // A select clause is not allowed in a query action; keep it as invalid minutiae.
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                    DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
        }

        startContext(ParserRuleContext.DO_CLAUSE);
        STNode doKeyword = parseDoKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();

        STNode limitClause = parseLimitClause(isRhsExpr);
        return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt, limitClause);
    }

    /**
     * Parse 'do' keyword.
     *
     * @return do keyword node
     */
    private STNode parseDoKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.DO_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.DO_KEYWORD);
            return parseDoKeyword();
        }
    }

    /**
     * Parse optional field access or xml optional attribute access expression.
     * <p>
     * <code>
     * optional-field-access-expr := expression ?. field-name
     * <br/>
     * xml-optional-attribute-access-expr := expression ?.
xml-attribute-name
     * <br/>
     * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
     * <br/>
     * xml-qualified-name := xml-namespace-prefix : identifier
     * <br/>
     * xml-namespace-prefix := identifier
     * </code>
     *
     * @param lhsExpr Preceding expression of the optional access
     * @return Parsed node
     */
    private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        STNode optionalFieldAccessToken = parseOptionalChainingToken();
        STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
        return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
    }

    /**
     * Parse optional chaining token.
     *
     * @return parsed node
     */
    private STNode parseOptionalChainingToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
            return parseOptionalChainingToken();
        }
    }

    /**
     * Parse conditional expression.
     * <p>
     * <code>conditional-expr := expression ? expression : expression</code>
     *
     * @param lhsExpr Preceding expression of the question mark
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

        STNode nextToken = peek();
        STNode endExpr;
        STNode colon;
        if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // `a ? b : c` may have been over-parsed as `a ? (b:c)` (a qualified name);
            // split the qualified name back into middle-expr, colon, and end-expr.
            STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;
            middleExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.modulePrefix);
            colon = qualifiedNameRef.colon;
            endContext();
            endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        } else {
            colon = parseColon();
            endContext();
            endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);
        }
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
    }

    /**
     * Parse enum declaration.
     * <p>
     * module-enum-decl :=
     * metadata
     * [public] enum identifier { enum-member (, enum-member)* }
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @param metadata  Metadata of the enum declaration
     * @param qualifier Visibility qualifier (e.g. public), if any
     * @return Parsed enum node.
*/ private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) { startContext(ParserRuleContext.MODULE_ENUM_DECLARATION); STNode enumKeywordToken = parseEnumKeyword(); STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME); STNode openBraceToken = parseOpenBrace(); STNode enumMemberList = parseEnumMemberList(); STNode closeBraceToken = parseCloseBrace(); endContext(); openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken, DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER); return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier, openBraceToken, enumMemberList, closeBraceToken); } /** * Parse 'enum' keyword. * * @return enum keyword node */ private STNode parseEnumKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ENUM_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ENUM_KEYWORD); return parseEnumKeyword(); } } /** * Parse enum member list. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return enum member list node. */ private STNode parseEnumMemberList() { startContext(ParserRuleContext.ENUM_MEMBER_LIST); if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) { return STNodeFactory.createEmptyNodeList(); } List<STNode> enumMemberList = new ArrayList<>(); STNode enumMember = parseEnumMember(); STNode enumMemberRhs; while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) { enumMemberRhs = parseEnumMemberEnd(); if (enumMemberRhs == null) { break; } enumMemberList.add(enumMember); enumMemberList.add(enumMemberRhs); enumMember = parseEnumMember(); } enumMemberList.add(enumMember); endContext(); return STNodeFactory.createNodeList(enumMemberList); } /** * Parse enum member. * <p> * enum-member := metadata identifier [= const-expr] * </p> * * @return Parsed enum member node. 
*/ private STNode parseEnumMember() { STNode metadata; switch (peek().kind) { case DOCUMENTATION_STRING: case AT_TOKEN: metadata = parseMetaData(); break; default: metadata = STNodeFactory.createEmptyNode(); } STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME); return parseEnumMemberRhs(metadata, identifierNode); } private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) { STNode equalToken, constExprNode; switch (peek().kind) { case EQUAL_TOKEN: equalToken = parseAssignOp(); constExprNode = parseExpression(); break; case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: equalToken = STNodeFactory.createEmptyNode(); constExprNode = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode); return parseEnumMemberRhs(metadata, identifierNode); } return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode); } private STNode parseEnumMemberEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.ENUM_MEMBER_END); return parseEnumMemberEnd(); } } /** * Parse transaction statement. * <p> * <code>transaction-stmt := "transaction" block-stmt ;</code> * * @return Transaction statement node */ private STNode parseTransactionStatement() { startContext(ParserRuleContext.TRANSACTION_STMT); STNode transactionKeyword = parseTransactionKeyword(); STNode blockStmt = parseBlockNode(); endContext(); STNode onFailClause; if (peek().kind == SyntaxKind.ON_KEYWORD) { onFailClause = parseOnFailClause(); } else { onFailClause = STNodeFactory.createEmptyNode(); } return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause); } /** * Parse transaction keyword. 
*
     * @return parsed node
     */
    private STNode parseTransactionKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.TRANSACTION_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.TRANSACTION_KEYWORD);
            return parseTransactionKeyword();
        }
    }

    /**
     * Parse commit action.
     * <p>
     * <code>commit-action := "commit"</code>
     *
     * @return Commit action node
     */
    private STNode parseCommitAction() {
        STNode commitKeyword = parseCommitKeyword();
        return STNodeFactory.createCommitActionNode(commitKeyword);
    }

    /**
     * Parse commit keyword.
     *
     * @return parsed node
     */
    private STNode parseCommitKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.COMMIT_KEYWORD);
            return parseCommitKeyword();
        }
    }

    /**
     * Parse retry statement.
     * <p>
     * <code>
     * retry-stmt := "retry" retry-spec block-stmt
     * <br/>
     * retry-spec := [type-parameter] [ "(" arg-list ")" ]
     * </code>
     *
     * @return Retry statement node
     */
    private STNode parseRetryStatement() {
        startContext(ParserRuleContext.RETRY_STMT);
        STNode retryKeyword = parseRetryKeyword();
        STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
        endContext();
        return retryStmt;
    }

    private STNode parseRetryKeywordRhs(STNode retryKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case LT_TOKEN:
                // Optional type parameter: `retry<T> ...`.
                STNode typeParam = parseTypeParameter();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                typeParam = STNodeFactory.createEmptyNode();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);
                return parseRetryKeywordRhs(retryKeyword);
        }
    }

    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                // Optional argument list: `retry(...)`.
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }

        STNode blockStmt = parseRetryBody();
        STNode onFailClause;
        if (peek().kind == SyntaxKind.ON_KEYWORD) {
            onFailClause = parseOnFailClause();
        } else {
            onFailClause = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }

    private STNode parseRetryBody() {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            case TRANSACTION_KEYWORD:
                // `retry transaction { ... }` form.
                return parseTransactionStatement();
            default:
                recover(peek(), ParserRuleContext.RETRY_BODY);
                return parseRetryBody();
        }
    }

    private STNode parseOnFailClause() {
        STNode onKeyword = parseOnKeyword();
        STNode failKeyword;
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.FAIL_KEYWORD) {
            failKeyword = parseFailKeyword();
        } else {
            failKeyword = STNodeFactory.createEmptyNode();
        }
        STNode typeDescriptorNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true,
                false);
        STNode identifierNode = parseIdentifier(ParserRuleContext.VARIABLE_REF);
        STNode blockStatement = parseBlockNode();
        return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptorNode, identifierNode,
                blockStatement);
    }

    /**
     * Parse retry keyword.
     *
     * @return parsed node
     */
    private STNode parseRetryKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.RETRY_KEYWORD) {
            return consume();
        } else {
            recover(token, ParserRuleContext.RETRY_KEYWORD);
            return parseRetryKeyword();
        }
    }

    /**
     * Parse rollback statement.
* <p> * <code>rollback-stmt := "rollback" [expression] ";"</code> * * @return Rollback statement node */ private STNode parseRollbackStatement() { startContext(ParserRuleContext.ROLLBACK_STMT); STNode rollbackKeyword = parseRollbackKeyword(); STNode expression; if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) { expression = STNodeFactory.createEmptyNode(); } else { expression = parseExpression(); } STNode semicolon = parseSemicolon(); endContext(); return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon); } /** * Parse rollback keyword. * * @return Rollback keyword node */ private STNode parseRollbackKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.ROLLBACK_KEYWORD); return parseRollbackKeyword(); } } /** * Parse transactional expression. * <p> * <code>transactional-expr := "transactional"</code> * * @return Transactional expression node */ private STNode parseTransactionalExpression() { STNode transactionalKeyword = parseTransactionalKeyword(); return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword); } /** * Parse transactional keyword. * * @return Transactional keyword node */ private STNode parseTransactionalKeyword() { STToken token = peek(); if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) { return consume(); } else { recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD); return parseTransactionalKeyword(); } } /** * Parse service-constructor-expr. 
 * <p>
 * <code>
 * service-constructor-expr := [annots] service service-body-block
 * <br/>
 * service-body-block := { service-method-defn* }
 * <br/>
 * service-method-defn := metadata [resource] function identifier function-signature method-defn-body
 * </code>
 *
 * @param annots Annotations
 * @return Service constructor expression node
 */
private STNode parseServiceConstructorExpression(STNode annots) {
    startContext(ParserRuleContext.SERVICE_CONSTRUCTOR_EXPRESSION);
    STNode serviceKeyword = parseServiceKeyword();
    STNode serviceBody = parseServiceBody();
    endContext();
    return STNodeFactory.createServiceConstructorExpressionNode(annots, serviceKeyword, serviceBody);
}

/**
 * Parse byte array literal, i.e. a base16 or base64 literal.
 * <p>
 * <code>
 * byte-array-literal := Base16Literal | Base64Literal
 * <br/>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * </code>
 *
 * @return parsed node
 */
private STNode parseByteArrayLiteral() {
    // The leading keyword decides which content validation applies later
    // (base16 vs base64) in the two-arg parseByteArrayLiteral overload.
    STNode type;
    if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
        type = parseBase16Keyword();
    } else {
        type = parseBase64Keyword();
    }

    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    if (startingBackTick.isMissing()) {
        // No opening backtick could be recovered: synthesize an empty literal with
        // missing backticks and attach a missing-content diagnostic instead of
        // attempting to parse a template body.
        startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode content = STNodeFactory.createEmptyNode();
        STNode byteArrayLiteral =
                STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
        byteArrayLiteral =
                SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
        return byteArrayLiteral;
    }

    STNode content = parseByteArrayContent();
    return parseByteArrayLiteral(type, startingBackTick, content);
}

/**
 * Parse byte array literal.
 *
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        // Exactly one template item: validate it against the literal kind. Invalid
        // content is folded into the opening backtick as invalid-node minutiae, so
        // the tree keeps the source text while carrying a diagnostic.
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            // Item is not plain template text (presumably an interpolation —
            // TODO confirm) and is therefore invalid byte array content.
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // More than one template item: the backtick body was broken up, so fold
        // every item into the opening backtick as invalid minutiae and report a
        // single invalid-content diagnostic.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }

    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}

/**
 * Parse <code>base16</code> keyword.
 *
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE16_KEYWORD) {
        return consume();
    } else {
        // Not the expected keyword: run error recovery, then retry.
        recover(token, ParserRuleContext.BASE16_KEYWORD);
        return parseBase16Keyword();
    }
}

/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.BASE64_KEYWORD) {
        return consume();
    } else {
        // Not the expected keyword: run error recovery, then retry.
        recover(token, ParserRuleContext.BASE64_KEYWORD);
        return parseBase64Keyword();
    }
}

/**
 * Validate and parse byte array literal content.
 * An error is reported, if the content is invalid.
 *
 * @return parsed node
 */
private STNode parseByteArrayContent() {
    STToken nextToken = peek();
    List<STNode> items = new ArrayList<>();
    // Collect template items until the closing backtick (or EOF) is reached.
    // Validation of the collected content happens in the caller.
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode content = parseTemplateItem();
        items.add(content);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(items);
}

/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    char[] charArray = content.toCharArray();
    int hexDigitCount = 0;
    for (char c : charArray) {
        switch (c) {
            // Whitespace is permitted anywhere between hex digits.
            case LexerTerminals.TAB:
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
                break;
            default:
                if (isHexDigit(c)) {
                    hexDigitCount++;
                } else {
                    return false;
                }
                break;
        }
    }
    // Hex digits must pair up into whole bytes (HexGroup = two digits).
    return hexDigitCount % 2 == 0;
}

/**
 * Validate base64 literal content.
* <p> * <code> * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS ` * <br/> * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char * <br/> * PaddedBase64Group := * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar * <br/> * Base64Char := A .. Z | a .. z | 0 .. 9 | + | / * <br/> * PaddingChar := = * <br/> * WS := WhiteSpaceChar* * <br/> * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20 * </code> * * @param content the string surrounded by the backticks * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise. */ static boolean isValidBase64LiteralContent(String content) { char[] charArray = content.toCharArray(); int base64CharCount = 0; int paddingCharCount = 0; for (char c : charArray) { switch (c) { case LexerTerminals.TAB: case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: break; case LexerTerminals.EQUAL: paddingCharCount++; break; default: if (isBase64Char(c)) { if (paddingCharCount == 0) { base64CharCount++; } else { return false; } } else { return false; } break; } } if (paddingCharCount > 2) { return false; } else if (paddingCharCount == 0) { return base64CharCount % 4 == 0; } else { return base64CharCount % 4 == 4 - paddingCharCount; } } /** * <p> * Check whether a given char is a base64 char. * </p> * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code> * * @param c character to check * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise. 
*/ static boolean isBase64Char(int c) { if ('a' <= c && c <= 'z') { return true; } if ('A' <= c && c <= 'Z') { return true; } if (c == '+' || c == '/') { return true; } return isDigit(c); } static boolean isHexDigit(int c) { if ('a' <= c && c <= 'f') { return true; } if ('A' <= c && c <= 'F') { return true; } return isDigit(c); } static boolean isDigit(int c) { return ('0' <= c && c <= '9'); } /** * Parse xml filter expression. * <p> * <code>xml-filter-expr := expression .< xml-name-pattern ></code> * * @param lhsExpr Preceding expression of .< token * @return Parsed node */ private STNode parseXMLFilterExpression(STNode lhsExpr) { STNode xmlNamePatternChain = parseXMLFilterExpressionRhs(); return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain); } /** * Parse xml filter expression rhs. * <p> * <code>filer-expression-rhs := .< xml-name-pattern ></code> * * @return Parsed node */ private STNode parseXMLFilterExpressionRhs() { STNode dotLTToken = parseDotLTToken(); return parseXMLNamePatternChain(dotLTToken); } /** * Parse xml name pattern chain. * <p> * <code> * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step * <br/> * filer-expression-rhs := .< xml-name-pattern > * <br/> * xml-element-children-step := /< xml-name-pattern > * <br/> * xml-element-descendants-step := /**\/<xml-name-pattern > * </code> * * @param startToken Preceding token of xml name pattern * @return Parsed node */ private STNode parseXMLNamePatternChain(STNode startToken) { startContext(ParserRuleContext.XML_NAME_PATTERN); STNode xmlNamePattern = parseXMLNamePattern(); STNode gtToken = parseGTToken(); endContext(); startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken, DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN); return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken); } /** * Parse <code> .< </code> token. 
* * @return Parsed node */ private STNode parseDotLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.DOT_LT_TOKEN); return parseDotLTToken(); } } /** * Parse xml name pattern. * <p> * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code> * * @return Parsed node */ private STNode parseXMLNamePattern() { List<STNode> xmlAtomicNamePatternList = new ArrayList<>(); STToken nextToken = peek(); if (isEndOfXMLNamePattern(nextToken.kind)) { return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); STNode separator; while (!isEndOfXMLNamePattern(peek().kind)) { separator = parseXMLNamePatternSeparator(); if (separator == null) { break; } xmlAtomicNamePatternList.add(separator); xmlAtomicNamePattern = parseXMLAtomicNamePattern(); xmlAtomicNamePatternList.add(xmlAtomicNamePattern); } return STNodeFactory.createNodeList(xmlAtomicNamePatternList); } private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) { switch (tokenKind) { case GT_TOKEN: case EOF_TOKEN: return true; case IDENTIFIER_TOKEN: case ASTERISK_TOKEN: case COLON_TOKEN: default: return false; } } private STNode parseXMLNamePatternSeparator() { STToken token = peek(); switch (token.kind) { case PIPE_TOKEN: return consume(); case GT_TOKEN: case EOF_TOKEN: return null; default: recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS); return parseXMLNamePatternSeparator(); } } /** * Parse xml atomic name pattern. 
* <p> * <code> * xml-atomic-name-pattern := * * * | identifier * | xml-namespace-prefix : identifier * | xml-namespace-prefix : * * </code> * * @return Parsed node */ private STNode parseXMLAtomicNamePattern() { startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN); STNode atomicNamePattern = parseXMLAtomicNamePatternBody(); endContext(); return atomicNamePattern; } private STNode parseXMLAtomicNamePatternBody() { STToken token = peek(); STNode identifier; switch (token.kind) { case ASTERISK_TOKEN: return consume(); case IDENTIFIER_TOKEN: identifier = consume(); break; default: recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START); return parseXMLAtomicNamePatternBody(); } return parseXMLAtomicNameIdentifier(identifier); } private STNode parseXMLAtomicNameIdentifier(STNode identifier) { STToken token = peek(); if (token.kind == SyntaxKind.COLON_TOKEN) { STNode colon = consume(); STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) { STToken endToken = consume(); return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken); } } return STNodeFactory.createSimpleNameReferenceNode(identifier); } /** * Parse xml step expression. * <p> * <code>xml-step-expr := expression xml-step-start</code> * * @param lhsExpr Preceding expression of /*, /<, or /**\/< token * @return Parsed node */ private STNode parseXMLStepExpression(STNode lhsExpr) { STNode xmlStepStart = parseXMLStepStart(); return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart); } /** * Parse xml filter expression rhs. 
* <p> * <code> * xml-step-start := * xml-all-children-step * | xml-element-children-step * | xml-element-descendants-step * <br/> * xml-all-children-step := /* * </code> * * @return Parsed node */ private STNode parseXMLStepStart() { STToken token = peek(); STNode startToken; switch (token.kind) { case SLASH_ASTERISK_TOKEN: return consume(); case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN: startToken = parseDoubleSlashDoubleAsteriskLTToken(); break; case SLASH_LT_TOKEN: default: startToken = parseSlashLTToken(); break; } return parseXMLNamePatternChain(startToken); } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseSlashLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN); return parseSlashLTToken(); } } /** * Parse <code> /< </code> token. * * @return Parsed node */ private STNode parseDoubleSlashDoubleAsteriskLTToken() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) { return consume(); } else { recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN); return parseDoubleSlashDoubleAsteriskLTToken(); } } /** * Parse match statement. 
* <p> * <code>match-stmt := match action-or-expr { match-clause+ }</code> * * @return Match statement */ private STNode parseMatchStatement() { startContext(ParserRuleContext.MATCH_STMT); STNode matchKeyword = parseMatchKeyword(); STNode actionOrExpr = parseActionOrExpression(); startContext(ParserRuleContext.MATCH_BODY); STNode openBrace = parseOpenBrace(); STNode matchClauses = parseMatchClauses(); STNode closeBrace = parseCloseBrace(); endContext(); endContext(); STNode onFailClause; if (peek().kind == SyntaxKind.ON_KEYWORD) { onFailClause = parseOnFailClause(); } else { onFailClause = STNodeFactory.createEmptyNode(); } return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace, onFailClause); } /** * Parse match keyword. * * @return Match keyword node */ private STNode parseMatchKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) { return consume(); } else { recover(nextToken, ParserRuleContext.MATCH_KEYWORD); return parseMatchKeyword(); } } /** * Parse match clauses list. * * @return Match clauses list */ private STNode parseMatchClauses() { List<STNode> matchClauses = new ArrayList<>(); while (!isEndOfMatchClauses(peek().kind)) { STNode clause = parseMatchClause(); matchClauses.add(clause); } return STNodeFactory.createNodeList(matchClauses); } private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) { switch (nextTokenKind) { case EOF_TOKEN: case CLOSE_BRACE_TOKEN: return true; default: return false; } } /** * Parse a single match match clause. 
 * <p>
 * <code>
 * match-clause := match-pattern-list [match-guard] => block-stmt
 * <br/>
 * match-guard := if expression
 * </code>
 *
 * @return A match clause
 */
private STNode parseMatchClause() {
    STNode matchPatterns = parseMatchPatternList();
    STNode matchGuard = parseMatchGuard();
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode blockStmt = parseBlockNode();
    return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
}

/**
 * Parse match guard.
 * <p>
 * <code>match-guard := if expression</code>
 *
 * @return Match guard
 */
private STNode parseMatchGuard() {
    switch (peek().kind) {
        case IF_KEYWORD:
            STNode ifKeyword = parseIfKeyword();
            // NOTE(review): the boolean flags select an expression-parsing mode;
            // presumably one of them stops the guard expression at `=>` — confirm
            // against the parseExpression overload.
            STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
            return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // No guard: `=>` directly follows the pattern list.
            return STNodeFactory.createEmptyNode();
        default:
            recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);
            return parseMatchGuard();
    }
}

/**
 * Parse match patterns list.
 * <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    List<STNode> matchClauses = new ArrayList<>();
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode clause = parseMatchPattern();
        if (clause == null) {
            break;
        }
        matchClauses.add(clause);

        // Patterns and their `|` separators are interleaved in one list.
        STNode seperator = parseMatchPatternEnd();
        if (seperator == null) {
            break;
        }
        matchClauses.add(seperator);
    }

    endContext();
    return STNodeFactory.createNodeList(matchClauses);
}

private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case PIPE_TOKEN:
        case IF_KEYWORD:
        // NOTE(review): this checks RIGHT_ARROW_TOKEN (`->`) while the match-clause
        // grammar and parseMatchPatternEnd use RIGHT_DOUBLE_ARROW_TOKEN (`=>`).
        // Looks inconsistent — confirm whether `->` is intentional here.
        case RIGHT_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse match pattern.
* <p> * <code> * match-pattern := var binding-pattern * | wildcard-match-pattern * | const-pattern * | list-match-pattern * | mapping-match-pattern * | functional-match-pattern * </code> * * @return Match pattern */ private STNode parseMatchPattern() { switch (peek().kind) { case OPEN_PAREN_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case PLUS_TOKEN: case MINUS_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: return parseSimpleConstExpr(); case IDENTIFIER_TOKEN: STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN); return parseFunctionalMatchPatternOrConsPattern(typeRefOrConstExpr); case VAR_KEYWORD: return parseVarTypedBindingPattern(); case OPEN_BRACKET_TOKEN: return parseListMatchPattern(); case OPEN_BRACE_TOKEN: return parseMappingMatchPattern(); case ERROR_KEYWORD: return parseFunctionalMatchPattern(consume()); default: recover(peek(), ParserRuleContext.MATCH_PATTERN_START); return parseMatchPattern(); } } private STNode parseMatchPatternEnd() { switch (peek().kind) { case PIPE_TOKEN: return parsePipeToken(); case IF_KEYWORD: case RIGHT_DOUBLE_ARROW_TOKEN: return null; default: recover(peek(), ParserRuleContext.MATCH_PATTERN_RHS); return parseMatchPatternEnd(); } } /** * Parse var typed binding pattern. * <p> * <code>var binding-pattern</code> * </p> * * @return Parsed typed binding pattern node */ private STNode parseVarTypedBindingPattern() { STNode varKeyword = parseVarKeyword(); STNode bindingPattern = parseBindingPattern(); return STNodeFactory.createTypedBindingPatternNode(varKeyword, bindingPattern); } /** * Parse var keyword. 
* * @return Var keyword node */ private STNode parseVarKeyword() { STToken nextToken = peek(); if (nextToken.kind == SyntaxKind.VAR_KEYWORD) { return consume(); } else { recover(nextToken, ParserRuleContext.VAR_KEYWORD); return parseVarKeyword(); } } /** * Parse list match pattern. * <p> * <code> * list-match-pattern := [ list-member-match-patterns ] * list-member-match-patterns := * match-pattern (, match-pattern)* [, rest-match-pattern] * | [ rest-match-pattern ] * </code> * </p> * * @return Parsed list match pattern node */ private STNode parseListMatchPattern() { startContext(ParserRuleContext.LIST_MATCH_PATTERN); STNode openBracketToken = parseOpenBracket(); List<STNode> matchPatternList = new ArrayList<>(); STNode restMatchPattern = null; STNode listMatchPatternMemberRhs = null; boolean isEndOfFields = false; while (!isEndOfListMatchPattern()) { STNode listMatchPatternMember = parseListMatchPatternMember(); if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) { restMatchPattern = listMatchPatternMember; listMatchPatternMemberRhs = parseListMatchPatternMemberRhs(); isEndOfFields = true; break; } matchPatternList.add(listMatchPatternMember); listMatchPatternMemberRhs = parseListMatchPatternMemberRhs(); if (listMatchPatternMemberRhs != null) { matchPatternList.add(listMatchPatternMemberRhs); } else { break; } } while (isEndOfFields && listMatchPatternMemberRhs != null) { STNode invalidField = parseListMatchPatternMember(); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, listMatchPatternMemberRhs); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField); restMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern, DiagnosticErrorCode.ERROR_MORE_MATCH_PATTERNS_AFTER_REST_MATCH_PATTERN); listMatchPatternMemberRhs = parseListMatchPatternMemberRhs(); } if (restMatchPattern == null) { restMatchPattern = STNodeFactory.createEmptyNode(); } STNode matchPatternListNode = 
STNodeFactory.createNodeList(matchPatternList); STNode closeBracketToken = parseCloseBracket(); endContext(); return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, restMatchPattern, closeBracketToken); } public boolean isEndOfListMatchPattern() { switch (peek().kind) { case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return true; default: return false; } } private STNode parseListMatchPatternMember() { STNode nextToken = peek(); switch (nextToken.kind) { case ELLIPSIS_TOKEN: return parseRestMatchPattern(); default: return parseMatchPattern(); } } /** * Parse rest match pattern. * <p> * <code> * rest-match-pattern := ... var variable-name * </code> * </p> * * @return Parsed rest match pattern node */ private STNode parseRestMatchPattern() { startContext(ParserRuleContext.REST_MATCH_PATTERN); STNode ellipsisToken = parseEllipsis(); STNode varKeywordToken = parseVarKeyword(); STNode variableName = parseVariableName(); endContext(); STSimpleNameReferenceNode simpleNameReferenceNode = (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName); return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode); } private STNode parseListMatchPatternMemberRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACKET_TOKEN: case EOF_TOKEN: return null; default: recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS); return parseListMatchPatternMemberRhs(); } } /** * Parse mapping match pattern. * <p> * mapping-match-pattern := { field-match-patterns } * <br/> * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern] * | [ rest-match-pattern ] * <br/> * field-match-pattern := field-name : match-pattern * <br/> * rest-match-pattern := ... var variable-name * </p> * * @return Parsed Node. 
*/ private STNode parseMappingMatchPattern() { startContext(ParserRuleContext.MAPPING_MATCH_PATTERN); STNode openBraceToken = parseOpenBrace(); List<STNode> fieldMatchPatternList = new ArrayList<>(); STNode restMatchPattern = null; boolean isEndOfFields = false; while (!isEndOfMappingMatchPattern()) { STNode fieldMatchPatternMember = parseFieldMatchPatternMember(); if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) { restMatchPattern = fieldMatchPatternMember; isEndOfFields = true; break; } fieldMatchPatternList.add(fieldMatchPatternMember); STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs(); if (fieldMatchPatternRhs != null) { fieldMatchPatternList.add(fieldMatchPatternRhs); } else { break; } } STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs(); while (isEndOfFields && fieldMatchPatternRhs != null) { STNode invalidField = parseFieldMatchPatternMember(); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, fieldMatchPatternRhs); restMatchPattern = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restMatchPattern, invalidField); restMatchPattern = SyntaxErrors.addDiagnostic(restMatchPattern, DiagnosticErrorCode.ERROR_MORE_FIELD_MATCH_PATTERNS_AFTER_REST_FIELD); fieldMatchPatternRhs = parseFieldMatchPatternRhs(); } if (restMatchPattern == null) { restMatchPattern = STNodeFactory.createEmptyNode(); } STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList); STNode closeBraceToken = parseCloseBrace(); endContext(); return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, restMatchPattern, closeBraceToken); } private STNode parseFieldMatchPatternMember() { switch (peek().kind) { case IDENTIFIER_TOKEN: return parseFieldMatchPattern(); case ELLIPSIS_TOKEN: return parseRestMatchPattern(); default: recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER); return parseFieldMatchPatternMember(); } } /** * Parse filed match pattern. 
* <p> * field-match-pattern := field-name : match-pattern * </p> * * @return Parsed field match pattern node */ public STNode parseFieldMatchPattern() { STNode fieldNameNode = parseVariableName(); STNode colonToken = parseColon(); STNode matchPattern = parseMatchPattern(); return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern); } public boolean isEndOfMappingMatchPattern() { switch (peek().kind) { case CLOSE_BRACE_TOKEN: case EOF_TOKEN: return true; default: return false; } } private STNode parseFieldMatchPatternRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: case EOF_TOKEN: return null; default: recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS); return parseFieldMatchPatternRhs(); } } private STNode parseFunctionalMatchPatternOrConsPattern(STNode typeRefOrConstExpr) { return parseFunctionalMatchPatternOrConsPattern(peek().kind, typeRefOrConstExpr); } private STNode parseFunctionalMatchPatternOrConsPattern(SyntaxKind nextToken, STNode typeRefOrConstExpr) { switch (nextToken) { case OPEN_PAREN_TOKEN: return parseFunctionalMatchPattern(typeRefOrConstExpr); default: if (isMatchPatternEnd(peek().kind)) { return typeRefOrConstExpr; } Solution solution = recover(peek(), ParserRuleContext.FUNC_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr); return parseFunctionalMatchPatternOrConsPattern(solution.tokenKind, typeRefOrConstExpr); } } private boolean isMatchPatternEnd(SyntaxKind tokenKind) { switch (tokenKind) { case RIGHT_DOUBLE_ARROW_TOKEN: case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: case CLOSE_PAREN_TOKEN: case PIPE_TOKEN: case IF_KEYWORD: case EOF_TOKEN: return true; default: return false; } } /** * Parse functional match pattern. 
* <p> * functional-match-pattern := functionally-constructible-type-reference ( arg-list-match-pattern ) * <br/> * functionally-constructible-type-reference := error | type-reference * <br/> * type-reference := identifier | qualified-identifier * <br/> * arg-list-match-pattern := positional-arg-match-patterns [, other-arg-match-patterns] * | other-arg-match-patterns * </p> * * @return Parsed functional match pattern node. */ private STNode parseFunctionalMatchPattern(STNode typeRef) { startContext(ParserRuleContext.FUNCTIONAL_MATCH_PATTERN); STNode openParenthesisToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode argListMatchPatternNode = parseArgListMatchPatterns(); STNode closeParenthesisToken = parseCloseParenthesis(); endContext(); return STNodeFactory.createFunctionalMatchPatternNode(typeRef, openParenthesisToken, argListMatchPatternNode, closeParenthesisToken); } private STNode parseArgListMatchPatterns() { List<STNode> argListMatchPatterns = new ArrayList<>(); SyntaxKind lastValidArgKind = SyntaxKind.IDENTIFIER_TOKEN; while (!isEndOfFunctionalMatchPattern()) { STNode currentArg = parseArgMatchPattern(); DiagnosticErrorCode errorCode = validateArgMatchPatternOrder(lastValidArgKind, currentArg.kind); if (errorCode == null) { argListMatchPatterns.add(currentArg); lastValidArgKind = currentArg.kind; } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode); } STNode argRhs = parseArgMatchPatternRhs(); if (argRhs == null) { break; } if (errorCode == null) { argListMatchPatterns.add(argRhs); } else { updateLastNodeInListWithInvalidNode(argListMatchPatterns, argRhs, null); } } return STNodeFactory.createNodeList(argListMatchPatterns); } private boolean isEndOfFunctionalMatchPattern() { switch (peek().kind) { case CLOSE_PAREN_TOKEN: case EOF_TOKEN: return true; default: return false; } } /** * Parse arg match patterns. 
* <code> * arg-match-pattern := match-pattern | named-arg-match-pattern | rest-match-pattern * </code> * <br/> * <br/> * * @return parsed arg match pattern node. */ private STNode parseArgMatchPattern() { switch (peek().kind) { case IDENTIFIER_TOKEN: return parseNamedOrPositionalArgMatchPattern(); case ELLIPSIS_TOKEN: return parseRestMatchPattern(); case OPEN_PAREN_TOKEN: case NULL_KEYWORD: case TRUE_KEYWORD: case FALSE_KEYWORD: case PLUS_TOKEN: case MINUS_TOKEN: case DECIMAL_INTEGER_LITERAL_TOKEN: case HEX_INTEGER_LITERAL_TOKEN: case DECIMAL_FLOATING_POINT_LITERAL_TOKEN: case HEX_FLOATING_POINT_LITERAL_TOKEN: case STRING_LITERAL_TOKEN: case VAR_KEYWORD: case OPEN_BRACKET_TOKEN: case OPEN_BRACE_TOKEN: case ERROR_KEYWORD: return parseMatchPattern(); default: recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN); return parseArgMatchPattern(); } } private STNode parseNamedOrPositionalArgMatchPattern() { STNode identifier = parseIdentifier(ParserRuleContext.MATCH_PATTERN_START); switch (peek().kind) { case EQUAL_TOKEN: return parseNamedArgMatchPattern(identifier); case OPEN_PAREN_TOKEN: return parseFunctionalMatchPattern(identifier); case COMMA_TOKEN: case CLOSE_PAREN_TOKEN: default: return identifier; } } /** * Parses the next named arg match pattern. 
* <br/> * <code>named-arg-match-pattern := arg-name = match-pattern</code> * <br/> * <br/> * * @return arg match pattern list node added the new arg match pattern */ private STNode parseNamedArgMatchPattern(STNode identifier) { startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN); STNode equalToken = parseAssignOp(); STNode matchPattern = parseMatchPattern(); endContext(); return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern); } private STNode parseArgMatchPatternRhs() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_PAREN_TOKEN: case EOF_TOKEN: return null; default: recover(peek(), ParserRuleContext.ARG_MATCH_PATTERN_RHS); return parseArgMatchPatternRhs(); } } private DiagnosticErrorCode validateArgMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) { DiagnosticErrorCode errorCode = null; switch (prevArgKind) { case NAMED_ARG_MATCH_PATTERN: if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN && currentArgKind != SyntaxKind.REST_MATCH_PATTERN) { errorCode = DiagnosticErrorCode.ERROR_NAMED_ARG_FOLLOWED_BY_POSITIONAL_ARG; } break; case REST_MATCH_PATTERN: errorCode = DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG; break; default: break; } return errorCode; } /** * Parse markdown documentation. * * @return markdown documentation node */ private STNode parseMarkdownDocumentation() { List<STNode> markdownDocLineList = new ArrayList<>(); STToken nextToken = peek(); while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) { STToken documentationString = consume(); STNode parsedDocLines = parseDocumentationString(documentationString); appendParsedDocumentationLines(markdownDocLineList, parsedDocLines); nextToken = peek(); } STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList); return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines); } /** * Parse documentation string. 
 *
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Re-lex the raw documentation token (carrying over its leading trivia) with
    // the dedicated documentation lexer/parser to get structured doc-line nodes.
    List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    CharReader charReader = CharReader.from(documentationStringToken.text());
    DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList);
    AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
    DocumentationParser documentationParser = new DocumentationParser(tokenReader);
    return documentationParser.parse();
}

// Flatten the leading-minutiae node's buckets into a plain list.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    List<STNode> leadingTriviaList = new ArrayList<>();
    int bucketCount = leadingMinutiaeNode.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        leadingTriviaList.add(leadingMinutiaeNode.childInBucket(i));
    }
    return leadingTriviaList;
}

// Append each parsed doc-line child into the accumulating list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int bucketCount = parsedDocLines.bucketCount();
    for (int i = 0; i < bucketCount; i++) {
        STNode markdownDocLine = parsedDocLines.childInBucket(i);
        markdownDocLineList.add(markdownDocLine);
    }
}

/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
 *
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}

// Complete the statement once the ambiguous prefix has been resolved: a
// typed-binding-pattern becomes a var-decl; anything else an expression statement.
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, finalKeyword, typedBindingPatternOrExpr, false);
    }
    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
    return parseStatementStartWithExprRhs(expr);
}

// Parse a construct that may turn out to be either a typed-binding-pattern or
// an expression, dispatching on the first token.
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrTypedBPWithFuncType();
        case IDENTIFIER_TOKEN:
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case OPEN_BRACKET_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        // Basic literals: valid both as singleton types and as expressions.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseActionOrExpressionInLhs(null);
            }
            return parseTypedBindingPattern(ParserRuleContext.VAR_DECL_STMT);
    }
}

/**
 * Parse the component
 * after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 *            valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `|=` compound assignment: the ambiguous node is an expression.
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // RHS resolved to a typed-BP, so `|` is a union type: fold the LHS
                // into the union's type-desc and keep the RHS binding pattern.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            // Otherwise it is a binary (bitwise-or) expression.
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                    rhsTypedBPOrExpr);
        case BITWISE_AND_TOKEN:
            // Same disambiguation as PIPE_TOKEN, for intersection types / bitwise-and.
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                    rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // Treat as a type-desc of a var-decl (binding pattern is missing).
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // A following identifier/`?` forces the prefix to be a type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            // Assignment: the ambiguous node is an lvalue expression.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Could be member-access (expr) or array-type-desc (typed-BP).
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN: // mapping binding pattern follows
        case ERROR_KEYWORD: // error binding pattern follows
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            STToken token = peek();
            recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}

// Complete a var-decl once the ambiguous prefix has been resolved to a type-desc:
// finish any complex type suffixes, then parse the binding pattern.
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    endContext();
    return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
}

private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
        return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
    }
    return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}

// True if the kind falls in the contiguous SyntaxKind range reserved for type
// descriptors (relies on the enum's declaration order).
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;
}

// True if the kind falls in the contiguous SyntaxKind range reserved for
// expressions; name references are excluded because they remain ambiguous.
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}

/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeOrExpr, closeParen);
}

/**
 * Parse type-desc or expression. This method does not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc();
            break;
        case IDENTIFIER_TOKEN:
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            typeOrExpr = parseTypedDescOrExprStartsWithOpenBracket();
            break;
        // Basic literals: valid both as singleton types and as expressions.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseActionOrExpressionInLhs(null);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Already resolved to a type-desc: complete any complex type suffixes.
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}

// True if the kind is definitely an expression: known literal kinds, or the
// contiguous expression range of SyntaxKind.
private boolean isExpression(SyntaxKind kind) {
    switch (kind) {
        case NUMERIC_LITERAL:
        case STRING_LITERAL_TOKEN:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        default:
            return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
                    kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
    }
}

/**
 * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // `() =>` : start of an implicit anon-func; `()` is its empty param list.
            STNode params = STNodeFactory.createEmptyNodeList();
            STNode anonFuncParam =
                    STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
            endContext();
            return anonFuncParam;
        default:
            return STNodeFactory.createNilLiteralNode(openParen, closeParen);
    }
}

private STNode parseAnonFuncExprOrTypedBPWithFuncType() {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc();
    if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
        return exprOrTypeDesc;
    }
    // Resolved to a function-type-desc: continue as a typed binding pattern.
    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}

/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc() {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature = parseFuncSignature(true);
    endContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A function body follows, so this is an explicit anon-func expression.
            switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, functionKeyword,
                    funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
        default:
            // No body: it is a function-type-desc of a var-decl.
            switchContext(ParserRuleContext.VAR_DECL_STMT);
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    true);
    }
}

// Resolve the remainder of an ambiguous type-desc-or-expression prefix, without
// considering binding patterns (cf. parseTypedBindingPatternOrExprRhs above).
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `|=` compound assignment: the prefix is an expression.
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
        case BITWISE_AND_TOKEN:
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        ampersand, rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A following identifier/`?` forces the prefix to be a type-desc.
            startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
            typeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    false);
            endContext();
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}

// True if the node could still be read as either a type-desc or an expression:
// name refs, literals, bracketed lists, and certain combinations thereof.
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the second operand of this `||` is dead — when the
            // operator is not PIPE the method has already returned false, and when
            // it is PIPE it cannot equal BITWISE_AND. Presumably the intent was
            // `!= PIPE_TOKEN && != BITWISE_AND_TOKEN` (treat both `|` and `&` as
            // ambiguous). Confirm and fix in a separate PR.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // Ambiguous only when the container and every key item are ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

// True if the node is composed purely of basic literals (optionally signed
// numerics, and combinations of literals).
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same dead `||` operand as in isAmbiguous() above. Also,
            // this branch and BRACED_EXPRESSION recurse into isAmbiguous() rather
            // than isAllBasicLiterals(), which looks unintended for a method named
            // isAllBasicLiterals — verify before changing; push as a separate PR.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only unary +/- applied to a numeric literal counts as a basic literal.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}

private boolean isNumericLiteral(STNode node) {
    switch (node.kind) {
        case NUMERIC_LITERAL:
            return true;
        default:
            return false;
    }
}

// Parse an ambiguous `[ ... ]` prefix as a tuple type-desc whose members are
// themselves still type-desc-or-expression nodes.
private STNode parseTypedDescOrExprStartsWithOpenBracket() {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> members = new ArrayList<>();
    STNode memberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        STNode expr = parseTypeDescOrExpr();
        members.add(expr);
        memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        members.add(memberEnd);
    }
    STNode memberNodes = STNodeFactory.createNodeList(members);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);
}

/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 *                  | wildcard-binding-pattern
 *                  | list-binding-pattern
 *                  | mapping-binding-pattern
 *                  | functional-binding-pattern
 * <br/><br/>
 * <p>
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * <br/><br/>
 * <p>
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * <br/><br/>
 * <p>
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                         | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * <br/>
 * rest-binding-pattern := ...
variable-name
 * <p>
 * <br/><br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * <br/>
 * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
 *                           | other-arg-binding-patterns
 * <br/>
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * <br/>
 * positional-arg-binding-pattern := binding-pattern
 * <br/>
 * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
 *                             | [rest-binding-pattern]
 * <br/>
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * <br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    switch (peek().kind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            recover(peek(), ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}

// Disambiguate an identifier-led binding pattern: `identifier (` is an error
// binding pattern with a missing `error` keyword; a non-simple (qualified)
// name is invalid here and is replaced by a missing identifier carrying the
// original node as invalid minutiae.
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
        return createCaptureOrWildcardBP(identifier);
    }
    return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}

// `_` becomes a wildcard binding pattern; anything else a capture binding pattern.
private STNode createCaptureOrWildcardBP(STNode varName) {
    STNode bindingPattern;
    if (isWildcardBP(varName)) {
        bindingPattern = getWildcardBindingPattern(varName);
    } else {
        bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
    }
    return bindingPattern;
}

/**
 * Parse list-binding-patterns.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    List<STNode> bindingPatternsList = new ArrayList<>();
    STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
    endContext();
    return listBindingPattern;
}

// Handle the empty `[]` case; otherwise parse the first member and delegate to
// the member-loop overload.
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
        STNode closeBracket = parseCloseBracket();
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
                closeBracket);
    }
    STNode listBindingPatternMember = parseListBindingPatternMember();
    bindingPatternsList.add(listBindingPatternMember);
    STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
    return listBindingPattern;
}

// Parse the remaining members. A rest-binding-pattern terminates the loop and is
// pulled out of the member list into its own slot of the resulting node.
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingPatternMemberRhs();
        if (listBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }
    STNode restBindingPattern;
    if (member.kind == SyntaxKind.REST_BINDING_PATTERN) {
        restBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);
    } else {
        restBindingPattern = STNodeFactory.createEmptyNode();
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
            closeBracket);
}

// Comma continues the member list; close-bracket (null) ends it.
private STNode parseListBindingPatternMemberRhs() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
            return parseListBindingPatternMemberRhs();
    }
}

private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}

/**
 * Parse list-binding-pattern member.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * </code>
 *
 * @return List binding pattern member
 */
private STNode parseListBindingPatternMember() {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}

/**
 * Parse rest binding pattern.
 * <p>
 * <code>
 * rest-binding-pattern := ... variable-name
 * </code>
 *
 * @return Rest binding pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode simpleNameReferenceNode =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
}

/**
 * Parse Typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/><br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
    return typeBindingPattern;
}

/**
 * Parse mapping-binding-patterns.
* <p> * <code> * mapping-binding-pattern := { field-binding-patterns } * <br/><br/> * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * field-binding-pattern := field-name : binding-pattern | variable-name * </code> * * @return mapping-binding-pattern node */ private STNode parseMappingBindingPattern() { startContext(ParserRuleContext.MAPPING_BINDING_PATTERN); STNode openBrace = parseOpenBrace(); STToken token = peek(); if (isEndOfMappingBindingPattern(token.kind)) { STNode closeBrace = parseCloseBrace(); STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList(); STNode restBindingPattern = STNodeFactory.createEmptyNode(); endContext(); return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern, closeBrace); } List<STNode> bindingPatterns = new ArrayList<>(); STNode prevMember = parseMappingBindingPatternMember(); if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) { bindingPatterns.add(prevMember); } return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember); } private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) { STToken token = peek(); STNode mappingBindingPatternRhs = null; while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) { mappingBindingPatternRhs = parseMappingBindingPatternEnd(); if (mappingBindingPatternRhs == null) { break; } bindingPatterns.add(mappingBindingPatternRhs); prevMember = parseMappingBindingPatternMember(); if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) { break; } bindingPatterns.add(prevMember); token = peek(); } STNode restBindingPattern; if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) { restBindingPattern = prevMember; } else { restBindingPattern = STNodeFactory.createEmptyNode(); } STNode closeBrace = parseCloseBrace(); STNode bindingPatternsNode = 
STNodeFactory.createNodeList(bindingPatterns); endContext(); return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, restBindingPattern, closeBrace); } /** * Parse mapping-binding-pattern entry. * <p> * <code> * mapping-binding-pattern := { field-binding-patterns } * <br/><br/> * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern] * | [ rest-binding-pattern ] * <br/><br/> * field-binding-pattern := field-name : binding-pattern * | variable-name * </code> * * @return mapping-binding-pattern node */ private STNode parseMappingBindingPatternMember() { STToken token = peek(); switch (token.kind) { case ELLIPSIS_TOKEN: return parseRestBindingPattern(); default: return parseFieldBindingPattern(); } } private STNode parseMappingBindingPatternEnd() { switch (peek().kind) { case COMMA_TOKEN: return parseComma(); case CLOSE_BRACE_TOKEN: return null; default: recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END); return parseMappingBindingPatternEnd(); } } /** * Parse field-binding-pattern. 
* <code>field-binding-pattern := field-name : binding-pattern | varname</code> * * @return field-binding-pattern node */ private STNode parseFieldBindingPattern() { switch (peek().kind) { case IDENTIFIER_TOKEN: STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME); STNode fieldBindingPattern = parseFieldBindingPattern(identifier); return fieldBindingPattern; default: recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME); return parseFieldBindingPattern(); } } private STNode parseFieldBindingPattern(STNode identifier) { STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier); if (peek().kind != SyntaxKind.COLON_TOKEN) { return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference); } STNode colon = parseColon(); STNode bindingPattern = parseBindingPattern(); return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern); } private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) { return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN; } private STNode parseErrorTypeDescOrErrorBP(STNode annots) { STToken nextNextToken = peek(2); switch (nextNextToken.kind) { case OPEN_PAREN_TOKEN: return parseAsErrorBindingPattern(); case LT_TOKEN: return parseAsErrorTypeDesc(annots); case IDENTIFIER_TOKEN: SyntaxKind nextNextNextTokenKind = peek(3).kind; if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN || nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) { return parseAsErrorBindingPattern(); } default: return parseAsErrorTypeDesc(annots); } } private STNode parseAsErrorBindingPattern() { startContext(ParserRuleContext.ASSIGNMENT_STMT); return parseAssignmentStmtRhs(parseErrorBindingPattern()); } private STNode parseAsErrorTypeDesc(STNode annots) { STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVariableDecl(getAnnotations(annots), finalKeyword, false); } /** * Parse error binding pattern node. 
* <p> * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code> * <br/><br/> * error-arg-list-binding-pattern := * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns] * | [error-field-binding-patterns] * <br/><br/> * error-message-binding-pattern := simple-binding-pattern * <br/><br/> * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern * <br/><br/> * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern * <br/><br/> * error-field-binding-patterns := * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern] * | rest-binding-pattern * <br/><br/> * named-arg-binding-pattern := arg-name = binding-pattern * * @return Error binding pattern node. */ private STNode parseErrorBindingPattern() { startContext(ParserRuleContext.ERROR_BINDING_PATTERN); STNode errorKeyword = parseErrorKeyword(); return parseErrorBindingPattern(errorKeyword); } private STNode parseErrorBindingPattern(STNode errorKeyword) { STToken nextToken = peek(); STNode typeRef; switch (nextToken.kind) { case IDENTIFIER_TOKEN: typeRef = parseTypeReference(); break; case OPEN_PAREN_TOKEN: typeRef = STNodeFactory.createEmptyNode(); break; default: recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS); return parseErrorBindingPattern(errorKeyword); } return parseErrorBindingPattern(errorKeyword, typeRef); } private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) { STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS); STNode argListBindingPatterns = parseErrorArgListBindingPatterns(); STNode closeParenthesis = parseCloseParenthesis(); endContext(); return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis, argListBindingPatterns, closeParenthesis); } /** * Parse error arg list binding pattern. 
     * <p>
     * <code>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * <p>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * <p>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * <p>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * <p>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * <p>
     * named-arg-binding-pattern := arg-name = binding-pattern
     * </code>
     *
     * @return Error arg list binding patterns.
     */
    private STNode parseErrorArgListBindingPatterns() {
        List<STNode> argListBindingPatterns = new ArrayList<>();
        if (isEndOfErrorFieldBindingPatterns()) {
            // Empty arg list: error(...)
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START);
        if (firstArg.kind == SyntaxKind.CAPTURE_BINDING_PATTERN ||
                firstArg.kind == SyntaxKind.WILDCARD_BINDING_PATTERN) {
            // First arg is the error-message-binding-pattern; a second arg (the cause) may follow.
            argListBindingPatterns.add(firstArg);
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
            if (argEnd != null) {
                STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS);
                if (isValidSecondArgBindingPattern(secondArg.kind)) {
                    argListBindingPatterns.add(argEnd);
                    argListBindingPatterns.add(secondArg);
                } else {
                    // Invalid second arg: attach both the separator and the arg to the
                    // previous node as invalid-node diagnostics.
                    updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                    updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                            DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                }
            }
        } else {
            if (firstArg.kind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&
                    firstArg.kind != SyntaxKind.REST_BINDING_PATTERN) {
                // The first arg is not valid in any position: park it on the next token.
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            } else {
                argListBindingPatterns.add(firstArg);
            }
        }
        // Any remaining args must be error-field-binding-patterns.
        parseErrorFieldBindingPatterns(argListBindingPatterns);
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }

    // Kinds that may legally appear as the second (cause) arg of an error-binding-pattern.
    private boolean isValidSecondArgBindingPattern(SyntaxKind syntaxKind) {
        switch (syntaxKind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the trailing error-field-binding-patterns (named args followed by an optional
     * rest arg), validating their ordering and attaching out-of-order args as diagnostics.
     *
     * @param argListBindingPatterns Output list; already-parsed args plus separators
     */
    private void parseErrorFieldBindingPatterns(List<STNode> argListBindingPatterns) {
        SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_BINDING_PATTERN;
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // Reached the close paren.
                break;
            }
            STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN);
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.size() == 0) {
                // Nothing to attach the invalid nodes to; park them on the next token instead.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }
    }

    // The error arg list ends at the close paren (or EOF, for robustness).
    private boolean isEndOfErrorFieldBindingPatterns() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
            case EOF_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the separator after an error arg.
     *
     * @param currentCtx Context used for recovery
     * @return The comma token, or {@code null} when the arg list has ended
     */
    private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return consume();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(peek(), currentCtx);
                return parseErrorArgsBindingPatternEnd(currentCtx);
        }
    }

    /**
     * Parse a single arg of an error-arg-list-binding-pattern.
     *
     * @param context Context used for recovery
     * @return Parsed arg node
     */
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // Could be a named arg (name = BP) or a capture/wildcard binding pattern.
                return parseNamedOrSimpleArgBindingPattern();
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context);
        }
    }

    /**
     * Disambiguate between a named-arg-binding-pattern and a simple binding pattern,
     * after having consumed the leading identifier.
     *
     * @return Named-arg-binding-pattern, or capture/wildcard binding pattern
     */
    private STNode parseNamedOrSimpleArgBindingPattern() {
        STNode argNameOrSimpleBindingPattern = consume(); // We only approach here by seeing identifier.
        STToken secondToken = peek();
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                STNode equal = consume();
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                        bindingPattern);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
            default:
                return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
        }
    }

    /**
     * Validate the ordering of error-field-binding-patterns: only named args and a
     * trailing rest arg are allowed, and nothing may follow the rest arg.
     *
     * @param prevArgKind    Kind of the last valid arg
     * @param currentArgKind Kind of the arg just parsed
     * @return The diagnostic to attach, or {@code null} when the order is valid
     */
    private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                      SyntaxKind currentArgKind) {
        switch (currentArgKind) {
            case NAMED_ARG_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
                if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
                    return DiagnosticErrorCode.ERROR_ARG_FOLLOWED_BY_REST_ARG;
                }
                return null;
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            default:
                return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
        }
    }

    /*
     * This parses Typed binding patterns and deals with ambiguity between types,
     * and binding patterns. An example is 'T[a]'.
     * The ambiguity lies in between:
     * 1) Array Type
     * 2) List binding pattern
     * 3) Member access expression.
     */

    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }

    /**
     * Parse the binding pattern that follows an already-parsed type descriptor.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @param context  Context in which the typed-binding-pattern occurs
     * @param isRoot   Whether this is the root of the typed-binding-pattern
     * @return Typed-binding pattern, or the bare type-desc when not at root and no
     *         binding pattern follows
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN: // Capture/error binding pattern: T x, T error(..)
            case OPEN_BRACE_TOKEN: // Map binding pattern: T { }
            case ERROR_KEYWORD: // Error binding pattern: error T(..)
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // T[..] ..: ambiguous between array type and list binding pattern; resolve below.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // Else falls through to recovery.
            default:
                recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }

    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr        Type desc or the expression at the start
     * @param isTypedBindingPattern Is this a typed-binding-pattern
     * @param allowAssignment       Whether an assignment LHS is acceptable here
     * @param context               Context in which this occurs
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          boolean allowAssignment, ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        // If the bracketed list is empty ("[]"), treat it as an array type descriptor.
        if (isBracketedListEnd(peek().kind)) {
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }
        // Parse the first member and use its kind to disambiguate.
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case NONE:
            default:
                // Still ambiguous; continue below.
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd != null) {
            // More than one member: can only be a list binding pattern.
            List<STNode> memberList = new ArrayList<>();
            memberList.add(member);
            memberList.add(memberEnd);
            STNode bindingPattern = parseAsListBindingPattern(openBracket, memberList);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        }
        // Single ambiguous member: defer the decision to what follows the close bracket.
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Treat the already-parsed bracketed segment as a member-access (indexed) expression
     * and continue parsing the expression rhs.
     *
     * @param typeNameOrExpr Container expression
     * @param openBracket    Open bracket
     * @param member         Key expression
     * @return Parsed expression node
     */
    private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
        member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
        STNode closeBracket = parseCloseBracket();
        endContext();
        STNode keyExpr = STNodeFactory.createNodeList(member);
        STNode memberAccessExpr =
                STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
        return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
    }

    // A bracketed list ends at the close bracket (or EOF, for robustness).
    private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACKET_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case OPEN_BRACE_TOKEN: // mapping-binding pattern
            case ERROR_KEYWORD: // error-binding pattern
            case ELLIPSIS_TOKEN: // rest binding pattern
            case OPEN_BRACKET_TOKEN: // list-binding pattern
                return parseStatementStartBracketedListMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                break;
            default:
                if (!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) {
                    break;
                }
                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                recover(peek(), recoverContext, isTypedBindingPattern);
                return parseBracketedListMember(isTypedBindingPattern);
        }
        // Reached only from the break paths above: parse as an expression, but a
        // lone underscore is a wildcard binding pattern.
        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            return getWildcardBindingPattern(expr);
        }
        return expr;
    }

    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
     *
     * @param typeDesc    Type-desc
     * @param openBracket Open bracket
     * @param member      Member
     * @param context     Context in which this occurs
     * @return Parsed node
     */
    private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member,
                                        ParserRuleContext context) {
        typeDesc = getTypeDescFromExpr(typeDesc);
        typeDesc = validateForUsageOfVar(typeDesc);
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
                context);
    }

    /**
     * Parse the separator after a bracketed-list member.
     *
     * @return The comma token, or {@code null} when the list has ended
     */
    private STNode parseBracketedListMemberEnd() {
        switch (peek().kind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
                return parseBracketedListMemberEnd();
        }
    }

    /**
     * We reach here to break ambiguity of T[a]. This could be:
     * 1) Array Type Desc
     * 2) Member access on LHS
     * 3) Typed-binding-pattern
     * The decision is made by looking at the token that follows the close bracket.
     *
     * @param typeDescOrExpr        Type name or the expr that precede the open-bracket.
     * @param openBracket           Open bracket
     * @param member                Member
     * @param closeBracket          Close bracket
     * @param isTypedBindingPattern Is this a typed-binding-pattern.
     * @param allowAssignment       Whether an assignment LHS is acceptable here
     * @param context               Context in which this occurs
     * @return Specific node that matches to T[a], after solving ambiguity.
     */
    private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                             STNode closeBracket, boolean isTypedBindingPattern,
                                                             boolean allowAssignment, ParserRuleContext context) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN: // Capture binding pattern: T[a] b
            case OPEN_BRACE_TOKEN: // Map binding pattern: T[a] { }
            case ERROR_KEYWORD: // Error binding pattern: T[a] error(..)
                // T[a] was an array type desc; what follows is the binding pattern.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            case OPEN_BRACKET_TOKEN: // T[a][b]..
                if (isTypedBindingPattern) {
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);
                    return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
                }
                // Still ambiguous: fold into an indexed expression and recurse on the next bracket.
                STNode keyExpr = STNodeFactory.createNodeList(member);
                STNode expr =
                        STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
                return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
            case QUESTION_MARK_TOKEN:
                // T[a]? --> optional array type desc.
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                return parseTypedBindingPatternTypeRhs(typeDesc, context);
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                // T[a] | R.. or T[a] & R..
                return parseComplexTypeDescInTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket,
                        context, isTypedBindingPattern);
            case IN_KEYWORD:
                // "in" keyword only valid for foreach/from-clause typed-binding-patterns.
                if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case EQUAL_TOKEN: // T[a] =
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    // Equal and semi-colon are not valid terminators for these contexts; recover.
                    break;
                }
                if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                    return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
                }
                // Otherwise this is a member access on the LHS of an assignment.
                keyExpr = STNodeFactory.createNodeList(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            case SEMICOLON_TOKEN: // T[a];
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case CLOSE_BRACE_TOKEN: // T[a]}
            case COMMA_TOKEN: // T[a],
                if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                    keyExpr = STNodeFactory.createNodeList(member);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                // Else falls through to the default check.
            default:
                if (isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                    // An operator follows: this was a member access expression.
                    keyExpr = STNodeFactory.createNodeList(member);
                    typeDescOrExpr = getExpression(typeDescOrExpr);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                break;
        }
        recover(peek(), ParserRuleContext.BRACKETED_LIST_RHS, typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }

    /**
     * Build a typed-binding-pattern out of an ambiguous bracketed segment: the part before
     * the open bracket becomes the type-desc and the bracketed member becomes a
     * list-binding-pattern.
     *
     * @param typeDescOrExpr Type desc or expression before the open bracket
     * @param openBracket    Open bracket
     * @param member         Member (may be empty)
     * @param closeBracket   Close bracket
     * @return Typed-binding-pattern node
     */
    private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                             STNode closeBracket) {
        STNode bindingPatterns;
        if (isEmpty(member)) {
            bindingPatterns = STNodeFactory.createEmptyNodeList();
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
        STNode restBindingPattern = STNodeFactory.createEmptyNode();
        STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns,
                restBindingPattern, closeBracket);
        STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
        return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
    }
    /**
     * Parse a union or intersection type-desc/binary-expression that involves ambiguous
     * bracketed list in lhs.
     * <p>
     * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
     * <p>
     * Complexity occurs in scenarios such as <code>T[a] |/&amp; R[b]</code>. If the token after this
     * is another binding-pattern, then <code>(T[a] |/&amp; R[b])</code> becomes the type-desc. However,
     * if the token follows this is an equal or semicolon, then <code>(T[a] |/&amp; R)</code> becomes
     * the type-desc, and <code>[b]</code> becomes the binding pattern.
     *
     * @param typeDescOrExpr        Type desc or the expression
     * @param openBracket           Open bracket
     * @param member                Member
     * @param closeBracket          Close bracket
     * @param context               Context in which the typed binding pattern occurs
     * @param isTypedBindingPattern Is this a definite typed-binding-pattern
     * @return Parsed node
     */
    private STNode parseComplexTypeDescInTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                             STNode closeBracket, ParserRuleContext context,
                                                             boolean isTypedBindingPattern) {
        STNode pipeOrAndToken = parseUnionOrIntersectionToken();
        STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
        if (isTypedBindingPattern || typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // The RHS resolved to a typed-binding-pattern: combine the LHS array type-desc with
            // the RHS type-desc, and reuse the RHS binding pattern.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
            STNode newTypeDesc;
            if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
                newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                        rhsTypedBindingPattern.typeDescriptor);
            } else {
                newTypeDesc = createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken,
                        rhsTypedBindingPattern.typeDescriptor);
            }
            return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
        } else {
            // The RHS is an expression: the whole thing is a binary expression over a
            // member-access LHS.
            STNode keyExpr = getExpression(member);
            STNode containerExpr = getExpression(typeDescOrExpr);
            STNode lhsExpr =
                    STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                    typedBindingPatternOrExpr);
        }
    }

    /**
     * Attach the array dimension to the right-most constituent of a union/intersection
     * type-desc (e.g. {@code A|B[a]} arrays {@code B}, not the whole union), recursing
     * down the right spine.
     *
     * @param openBracket  Open bracket of the array dimension
     * @param member       Array length member (may be empty)
     * @param closeBracket Close bracket of the array dimension
     * @param lhsTypeDesc  Type-desc to which the dimension is added
     * @return Resulting type-desc
     */
    private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
            lhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc =
                    getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
            lhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
        }
        return lhsTypeDesc;
    }

    /**
     * Parse union (|) or intersection (&amp;) type operator.
     *
     * @return pipe or bitwise and token
     */
    private STNode parseUnionOrIntersectionToken() {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
            return consume();
        } else {
            recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
            return parseUnionOrIntersectionToken();
        }
    }

    /**
     * Infer the type of the ambiguous bracketed list, based on the type of the member.
     *
     * @param memberNode Member node
     * @return Inferred type of the bracketed list
     */
    private SyntaxKind getBracketedListNodeType(STNode memberNode) {
        if (isEmpty(memberNode)) {
            // Empty brackets ("[]") do not disambiguate on their own.
            return SyntaxKind.NONE;
        }
        if (isDefiniteTypeDesc(memberNode.kind)) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }
        switch (memberNode.kind) {
            case ASTERISK_LITERAL: // T[*]
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE: // a qualified-name-ref can only be a type-ref
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case NUMERIC_LITERAL: // member is a const expression; could be array-length or a key-expr
            case SIMPLE_NAME_REFERENCE: // member is a simple type-ref or expr
            case BRACKETED_LIST: // member is again ambiguous
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                return SyntaxKind.NONE;
            default:
                return SyntaxKind.INDEXED_EXPRESSION;
        }
    }

    /*
     * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
     * The ambiguity lies in between:
     * 1) Assignment that starts with list binding pattern
     * 2) Var-decl statement that starts with tuple type
     * 3) Statement that starts with list constructor, such as sync-send, etc.
     */

    /**
     * Parse any statement that starts with an open-bracket.
     *
     * @param annots               Annotations attached to the statement.
     * @param possibleMappingField Whether this could be a mapping field
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
        startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
        return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
    }

    // Same as above, but for a nested (non-root) bracketed list with no annotations.
    private STNode parseMemberBracketedList(boolean possibleMappingField) {
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStatementStartsWithOpenBracket(annots, false, possibleMappingField);
    }

    /**
     * The bracketed list at the start of a statement can be one of the following.
     * 1) List binding pattern
     * 2) Tuple type
     * 3) List constructor
     *
     * @param annots               Annotations attached to the statement
     * @param isRoot               Is this the root of the list
     * @param possibleMappingField Whether this could be a mapping field
     * @return Parsed node
     */
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
        startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        while (!isBracketedListEnd(peek().kind)) {
            // Parse member; as soon as a member pins down the construct, switch to that
            // dedicated parse path, carrying along the members parsed so far.
            STNode member = parseStatementStartBracketedListMember();
            SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
            switch (currentNodeType) {
                case TUPLE_TYPE_DESC:
                    // If the member type was figured out as a tuple-type-desc member, then parse the
                    // remaining members as tuple type members and be done with it.
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case LIST_BINDING_PATTERN:
                    // If the member type was figured out as a binding pattern, then parse the
                    // remaining members as binding patterns and be done with it.
                    return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case LIST_BP_OR_LIST_CONSTRUCTOR:
                    return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                    return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
                case NONE:
                default:
                    memberList.add(member);
                    break;
            }
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
        }
        // We reach here if it is still ambiguous, even after parsing the full list.
        STNode closeBracket = parseCloseBracket();
        STNode bracketedList = parseStatementStartBracketedList(annots, openBracket, memberList, closeBracket, isRoot,
                possibleMappingField);
        return bracketedList;
    }

    /**
     * Parse a member of a list-binding-pattern, tuple-type-desc, or
     * list-constructor-expr, when the parent is ambiguous.
     *
     * @param nextTokenKind Kind of the next token.
     * @return Parsed node
     */
    private STNode parseStatementStartBracketedListMember() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // Nested ambiguous bracketed list.
                return parseMemberBracketedList(false);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (isWildcardBP(identifier)) {
                    STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                    return getWildcardBindingPattern(varName);
                }
                if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    // "T..." is a rest descriptor (tuple-type member).
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
            case OPEN_BRACE_TOKEN:
                // Could be a mapping-binding-pattern or a mapping-constructor.
                return parseMappingBindingPatterOrMappingConstructor();
            case ERROR_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseErrorConstructorExpr();
                }
                if (peek(2).kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorBindingPattern();
                }
                // Error type desc.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case ELLIPSIS_TOKEN:
                return parseListBindingPatternMember();
            case XML_KEYWORD:
            case STRING_KEYWORD:
                // Backtick after xml/string means a template expression; otherwise a type-desc.
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                // "<" after table/stream means a parameterized type-desc; otherwise an expression.
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(nextToken, ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
                return parseStatementStartBracketedListMember();
        }
    }

    /**
     * Continue parsing an ambiguous bracketed list once a member has narrowed it down
     * to tuple-type-desc-or-list-constructor.
     *
     * @param annots      Annotations attached to the statement
     * @param openBracket Open bracket
     * @param memberList  Members parsed so far
     * @param member      Member just parsed
     * @param isRoot      Is this the root of the list
     * @return Parsed node
     */
    private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                         STNode member, boolean isRoot) {
        memberList.add(member);
        STNode memberEnd = parseBracketedListMemberEnd();
        STNode tupleTypeDescOrListCons;
        if (memberEnd == null) {
            STNode closeBracket = parseCloseBracket();
            tupleTypeDescOrListCons =
                    parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
        } else {
            memberList.add(memberEnd);
            tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
        }
        return tupleTypeDescOrListCons;
    }

    /**
     * Parse tuple type desc or list constructor.
     *
     * @param annots Annotations attached to the statement
     * @return Parsed node
     */
    private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
    }

    /**
     * Parse the remaining members of a construct that is either a tuple-type-desc or a
     * list-constructor, switching to a dedicated path once a member disambiguates.
     *
     * @param annots      Annotations attached to the statement
     * @param openBracket Open bracket
     * @param memberList  Members parsed so far
     * @param isRoot      Is this the root of the list
     * @return Parsed node
     */
    private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                       boolean isRoot) {
        STToken nextToken = peek();
        while (!isBracketedListEnd(nextToken.kind)) {
            STNode member = parseTupleTypeDescOrListConstructorMember(annots);
            SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
            switch (currentNodeType) {
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                default:
                    memberList.add(member);
                    break;
            }
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
            nextToken = peek();
        }
        STNode closeBracket = parseCloseBracket();
        return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    }

    /**
     * Parse one member of a tuple-type-desc-or-list-constructor construct.
     *
     * @param annots Annotations attached to the statement (used on the recovery path)
     * @return Parsed member node
     */
    private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // Nested ambiguous list.
                return parseTupleTypeDescOrListConstructor(annots);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
            case OPEN_BRACE_TOKEN:
                // Mapping constructor: only valid inside a list constructor.
                return parseMappingConstructorExpr();
            case ERROR_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseErrorConstructorExpr();
                }
                // Error type desc.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case XML_KEYWORD:
            case STRING_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                if (getNextNextToken(nextToken.kind).kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);
                return parseTupleTypeDescOrListConstructorMember(annots);
        }
    }

    // Classification for tuple-type-or-list-constructor members matches the
    // statement-start bracketed-list classification.
    private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
        return getStmtStartBracketedListType(memberNode);
    }

    /**
     * Decide, after the close bracket, whether the construct is a tuple-type-desc or a
     * list-constructor, based on the following token.
     *
     * @param openBracket  Open bracket
     * @param members      Parsed members
     * @param closeBracket Close bracket
     * @param isRoot       Is this the root of the list
     * @return Parsed node
     */
    private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members,
                                                          STNode closeBracket, boolean isRoot) {
        STNode tupleTypeOrListConst;
        switch (peek().kind) {
            case COMMA_TOKEN: // [a, b, c],
            case CLOSE_BRACE_TOKEN: // [a, b, c]}
            case CLOSE_BRACKET_TOKEN: // [a, b, c]]
                if (!isRoot) {
                    // Still ambiguous in a nested position: hand back an ambiguous node.
                    endContext();
                    return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket,
                            members, closeBracket);
                }
                // Else falls through.
            default:
                if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                        (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                    // An operator (or "=" at root) follows: this is a list constructor.
                    members = getExpressionList(members);
                    STNode memberExpressions = STNodeFactory.createNodeList(members);
                    tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                            memberExpressions, closeBracket);
                    break;
                }
                // Otherwise treat as a tuple type descriptor.
                STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
                STNode tupleTypeDesc =
                        STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
                tupleTypeOrListConst =
                        parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
        }
        endContext();
        if (!isRoot) {
            return tupleTypeOrListConst;
        }
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
    }

    /**
     * Continue a statement whose start resolved to either a type-desc (var-decl path)
     * or an expression (expression-statement path).
     *
     * @param annots              Annotations attached to the statement
     * @param tupleTypeOrListConst Resolved type-desc or expression
     * @param isRoot              Is this the root of the construct
     * @return Parsed node
     */
    private STNode parseStmtStartsWithTupleTypeOrExprRhs(STNode annots, STNode tupleTypeOrListConst, boolean isRoot) {
        // NOTE(review): this relies on all type-desc kinds being contiguous in SyntaxKind,
        // between RECORD_TYPE_DESC and TYPEDESC_TYPE_DESC.
        if (tupleTypeOrListConst.kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 &&
                tupleTypeOrListConst.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {
            STNode finalKeyword = STNodeFactory.createEmptyNode();
            STNode typedBindingPattern =
                    parseTypedBindingPatternTypeRhs(tupleTypeOrListConst, ParserRuleContext.VAR_DECL_STMT, isRoot);
            if (!isRoot) {
                return typedBindingPattern;
            }
            switchContext(ParserRuleContext.VAR_DECL_STMT);
            return parseVarDeclRhs(annots, finalKeyword, typedBindingPattern, false);
        }
        STNode expr = getExpression(tupleTypeOrListConst);
        expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
        return parseStatementStartWithExprRhs(expr);
    }

    /**
     * Continue parsing an ambiguous bracketed list as a tuple type descriptor, once a
     * member has pinned it down.
     *
     * @param annots      Annotations attached to the statement
     * @param openBracket Open bracket
     * @param memberList  Members parsed so far (re-interpreted as type-descs)
     * @param member      Member just parsed
     * @param isRoot      Is this the root of the list
     * @return Parsed node
     */
    private STNode parseAsTupleTypeDesc(STNode annots, STNode openBracket, List<STNode> memberList, STNode member,
                                        boolean isRoot) {
        memberList = getTypeDescList(memberList);
        startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        STNode tupleTypeMembers = parseTupleTypeMembers(member, memberList);
        STNode closeBracket = parseCloseBracket();
        endContext();
        STNode tupleType = STNodeFactory.createTupleTypeDescriptorNode(openBracket, tupleTypeMembers, closeBracket);
        STNode typeDesc =
                parseComplexTypeDescriptor(tupleType, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
        endContext();
        STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT,
                isRoot);
        if (!isRoot) {
            return typedBindingPattern;
        }
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, STNodeFactory.createEmptyNode(), typedBindingPattern, false);
    }

    /**
     * Continue parsing an ambiguous bracketed list as a list binding pattern, once a
     * member has pinned it down.
     *
     * @param openBracket Open bracket
     * @param memberList  Members parsed so far (re-interpreted as binding patterns)
     * @param member      Member just parsed
     * @param isRoot      Is this the root of the list
     * @return Parsed node
     */
    private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList, STNode member,
                                             boolean isRoot) {
        memberList = getBindingPatternsList(memberList);
        memberList.add(member);
        switchContext(ParserRuleContext.LIST_BINDING_PATTERN);
        STNode listBindingPattern = parseListBindingPattern(openBracket, member, memberList);
        endContext();
        if (!isRoot) {
            return listBindingPattern;
        }
        // At root, a list binding pattern starts an assignment statement.
        return parseAssignmentStmtRhs(listBindingPattern);
    }

    // Variant used when the disambiguating member has already been added to the list.
    private STNode parseAsListBindingPattern(STNode openBracket, List<STNode> memberList) {
        memberList = getBindingPatternsList(memberList);
        switchContext(ParserRuleContext.LIST_BINDING_PATTERN);
        STNode listBindingPattern = parseListBindingPattern(openBracket, memberList);
        endContext();
        return listBindingPattern;
    }

    /**
     * Continue parsing an ambiguous bracketed list that is either a list binding
     * pattern or a list constructor.
     *
     * @param openBracket Open bracket
     * @param memberList  Members parsed so far
     * @param member      Member just parsed
     * @param isRoot      Is this the root of the list
     * @return Parsed node
     */
    private STNode parseAsListBindingPatternOrListConstructor(STNode openBracket, List<STNode> memberList,
                                                              STNode member, boolean isRoot) {
        memberList.add(member);
        STNode memberEnd = parseBracketedListMemberEnd();
        STNode listBindingPatternOrListCons;
        if (memberEnd == null) {
            STNode closeBracket = parseCloseBracket();
            listBindingPatternOrListCons =
                    parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot);
        } else {
            memberList.add(memberEnd);
            listBindingPatternOrListCons = parseListBindingPatternOrListConstructor(openBracket, memberList, isRoot);
        }
        return listBindingPatternOrListCons;
    }

    /**
     * Classify a statement-start bracketed-list member: which construct(s) does it
     * pin the enclosing list down to?
     *
     * @param memberNode Member node
     * @return The construct kind, or NONE/ambiguous kinds when not yet decidable
     */
    private SyntaxKind getStmtStartBracketedListType(STNode memberNode) {
        // NOTE(review): relies on type-desc kinds being contiguous in SyntaxKind,
        // between RECORD_TYPE_DESC and TYPEDESC_TYPE_DESC.
        if (memberNode.kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 &&
                memberNode.kind.compareTo(SyntaxKind.TYPEDESC_TYPE_DESC) <= 0) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }
        switch (memberNode.kind) {
            case NUMERIC_LITERAL: // member is a const expression. could be array-type or member-access
            case ASTERISK_LITERAL:
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE: // a qualified-name-ref can only be a type-ref
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case LIST_CONSTRUCTOR:
            case MAPPING_CONSTRUCTOR:
                return SyntaxKind.LIST_CONSTRUCTOR;
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                // Member is ambiguous; so the parent is also ambiguous.
                return SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR;
            case SIMPLE_NAME_REFERENCE: // member is a simple type-ref or expr
            case BRACKETED_LIST: // member is again ambiguous
                return SyntaxKind.NONE;
            case FUNCTION_CALL:
                if (isPossibleErrorBindingPattern((STFunctionCallExpressionNode) memberNode)) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.LIST_CONSTRUCTOR;
            case INDEXED_EXPRESSION:
                return SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST;
            default:
                if (isExpression(memberNode.kind) && !isAllBasicLiterals(memberNode) && !isAmbiguous(memberNode)) {
                    return SyntaxKind.LIST_CONSTRUCTOR;
                }
                return SyntaxKind.NONE;
        }
    }

    /**
     * Check whether a function-call-shaped node could be re-interpreted as an
     * error-binding-pattern, by checking every argument.
     *
     * @param funcCall Function call node
     * @return {@code true} when all args could be binding patterns
     */
    private boolean isPossibleErrorBindingPattern(STFunctionCallExpressionNode funcCall) {
        STNode args = funcCall.arguments;
        int size = args.bucketCount();
        for (int i = 0; i < size; i++) {
            STNode arg = args.childInBucket(i);
            if (arg.kind != SyntaxKind.NAMED_ARG && arg.kind != SyntaxKind.POSITIONAL_ARG &&
                    arg.kind != SyntaxKind.REST_ARG) {
                // Skip separators and non-arg children.
                continue;
            }
            if (!isPosibleArgBindingPattern((STFunctionArgumentNode) arg)) {
                return false;
            }
        }
        return true;
    }

    // Whether a single function arg could be re-interpreted as a binding pattern.
    private boolean isPosibleArgBindingPattern(STFunctionArgumentNode arg) {
        switch (arg.kind) {
            case POSITIONAL_ARG:
                STNode expr = ((STPositionalArgumentNode) arg).expression;
                return isPosibleBindingPattern(expr);
            case NAMED_ARG:
                expr = ((STNamedArgumentNode) arg).expression;
                return isPosibleBindingPattern(expr);
            case REST_ARG:
                // A rest arg can only bind a simple name.
                expr = ((STRestArgumentNode) arg).expression;
                return expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
            default:
                return false;
        }
    }

    // Whether an expression node could be re-interpreted as a binding pattern
    // (recursing through list/mapping constructors and nested error patterns).
    private boolean isPosibleBindingPattern(STNode node) {
        switch (node.kind) {
            case SIMPLE_NAME_REFERENCE:
                return true;
            case LIST_CONSTRUCTOR:
                STListConstructorExpressionNode listConstructor = (STListConstructorExpressionNode) node;
                for (int i = 0; i < listConstructor.bucketCount(); i++) {
                    STNode expr = listConstructor.childInBucket(i);
                    if (!isPosibleBindingPattern(expr)) {
                        return false;
                    }
                }
                return true;
            case MAPPING_CONSTRUCTOR:
                STMappingConstructorExpressionNode mappingConstructor = (STMappingConstructorExpressionNode) node;
                for (int i = 0; i < mappingConstructor.bucketCount(); i++) {
                    STNode expr = mappingConstructor.childInBucket(i);
                    if (!isPosibleBindingPattern(expr)) {
                        return false;
                    }
                }
                return true;
            case SPECIFIC_FIELD:
                STSpecificFieldNode specificField = (STSpecificFieldNode) node;
                if (specificField.readonlyKeyword != null) {
                    // "readonly" has no binding-pattern counterpart.
                    return false;
                }
                if (specificField.valueExpr == null) {
                    return true;
                }
                return isPosibleBindingPattern(specificField.valueExpr);
            case FUNCTION_CALL:
                return isPossibleErrorBindingPattern((STFunctionCallExpressionNode) node);
            default:
                return false;
        }
    }

    /**
     * Resolve a bracketed list that stayed ambiguous through all its members, using
     * the token that follows the close bracket.
     *
     * @param annots               Annotations attached to the statement
     * @param openBracket          Open bracket
     * @param members              Parsed members
     * @param closeBracket         Close bracket
     * @param isRoot               Is this the root of the list
     * @param possibleMappingField Whether this could be a mapping field
     * @return Parsed node
     */
    private STNode parseStatementStartBracketedList(STNode annots, STNode openBracket, List<STNode> members,
                                                    STNode closeBracket, boolean isRoot, boolean possibleMappingField) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case EQUAL_TOKEN:
                if (!isRoot) {
                    endContext();
                    return new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members,
                            closeBracket);
                }
                // "[...] =" at root is an assignment with a list binding pattern LHS.
                STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(members));
                STNode restBindingPattern = STNodeFactory.createEmptyNode();
                STNode listBindingPattern = STNodeFactory.createListBindingPatternNode(openBracket,
                        memberBindingPatterns, restBindingPattern, closeBracket);
                endContext();
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                return parseAssignmentStmtRhs(listBindingPattern);
            case
IDENTIFIER_TOKEN: case OPEN_BRACE_TOKEN: if (!isRoot) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket); } if (members.isEmpty()) { openBracket = SyntaxErrors.addDiagnostic(openBracket, DiagnosticErrorCode.ERROR_MISSING_TUPLE_MEMBER); } switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE); STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members)); STNode tupleTypeDesc = STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket); endContext(); STNode typeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); STNode typedBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); endContext(); return parseStmtStartsWithTypedBPOrExprRhs(annots, typedBindingPattern); case OPEN_BRACKET_TOKEN: if (!isRoot) { memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members)); tupleTypeDesc = STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket); endContext(); typeDesc = parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false); return typeDesc; } STAmbiguousCollectionNode list = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket); endContext(); STNode tpbOrExpr = parseTypedBindingPatternOrExprRhs(list, true); return parseStmtStartsWithTypedBPOrExprRhs(annots, tpbOrExpr); case COLON_TOKEN: if (possibleMappingField && members.size() == 1) { startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode colon = parseColon(); STNode fieldNameExpr = getExpression(members.get(0)); STNode valueExpr = parseExpression(); return STNodeFactory.createComputedNameFieldNode(openBracket, fieldNameExpr, closeBracket, colon, valueExpr); } default: endContext(); if (!isRoot) { return new 
STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
                }
                // Still ambiguous: keep the members as an ambiguous bracketed list and let
                // the rhs decide between a typed-binding-pattern and an expression.
                list = new STAmbiguousCollectionNode(SyntaxKind.BRACKETED_LIST, openBracket, members, closeBracket);
                STNode exprOrTPB = parseTypedBindingPatternOrExprRhs(list, false);
                return parseStmtStartsWithTypedBPOrExprRhs(annots, exprOrTPB);
        }
    }

    /**
     * Check whether a node is a wildcard binding pattern, i.e. a lone underscore,
     * either as a raw identifier token or wrapped in a simple-name-reference.
     *
     * @param node Node to check
     * @return <code>true</code> if the node is an underscore name; <code>false</code> otherwise
     */
    private boolean isWildcardBP(STNode node) {
        switch (node.kind) {
            case SIMPLE_NAME_REFERENCE:
                STToken nameToken = (STToken) ((STSimpleNameReferenceNode) node).name;
                return isUnderscoreToken(nameToken);
            case IDENTIFIER_TOKEN:
                return isUnderscoreToken((STToken) node);
            default:
                return false;
        }
    }

    /**
     * Check whether a token's text is exactly <code>"_"</code>.
     *
     * @param token Token to check
     * @return <code>true</code> if the token text is an underscore
     */
    private boolean isUnderscoreToken(STToken token) {
        return "_".equals(token.text());
    }

    /**
     * Create a wildcard binding pattern node from an underscore identifier.
     * Callers are expected to have validated the node with {@link #isWildcardBP};
     * any other node kind is a programming error here.
     *
     * @param identifier Underscore identifier (token or simple-name-reference)
     * @return Wildcard binding pattern node
     */
    private STNode getWildcardBindingPattern(STNode identifier) {
        switch (identifier.kind) {
            case SIMPLE_NAME_REFERENCE:
                STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                return STNodeFactory.createWildcardBindingPatternNode(varName);
            case IDENTIFIER_TOKEN:
                return STNodeFactory.createWildcardBindingPatternNode(identifier);
            default:
                throw new IllegalStateException();
        }
    }

    /*
     * This section tries to break the ambiguity in parsing a statement that starts with a open-brace.
     */

    /**
     * Parse statements that starts with open-brace. It could be a:
     * 1) Block statement
     * 2) Var-decl with mapping binding pattern.
     * 3) Statement that starts with mapping constructor expression.
* * @return Parsed node */ private STNode parseStatementStartsWithOpenBrace() { startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode openBrace = parseOpenBrace(); if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) { STNode closeBrace = parseCloseBrace(); switch (peek().kind) { case EQUAL_TOKEN: switchContext(ParserRuleContext.ASSIGNMENT_STMT); STNode fields = STNodeFactory.createEmptyNodeList(); STNode restBindingPattern = STNodeFactory.createEmptyNode(); STNode bindingPattern = STNodeFactory.createMappingBindingPatternNode(openBrace, fields, restBindingPattern, closeBrace); return parseAssignmentStmtRhs(bindingPattern); case RIGHT_ARROW_TOKEN: case SYNC_SEND_TOKEN: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); fields = STNodeFactory.createEmptyNodeList(); STNode expr = STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace); expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true); return parseStatementStartWithExprRhs(expr); default: STNode statements = STNodeFactory.createEmptyNodeList(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace); } } STNode member = parseStatementStartingBracedListFirstMember(); SyntaxKind nodeType = getBracedListType(member); STNode stmt; switch (nodeType) { case MAPPING_BINDING_PATTERN: return parseStmtAsMappingBindingPatternStart(openBrace, member); case MAPPING_CONSTRUCTOR: return parseStmtAsMappingConstructorStart(openBrace, member); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return parseStmtAsMappingBPOrMappingConsStart(openBrace, member); case BLOCK_STATEMENT: STNode closeBrace = parseCloseBrace(); stmt = STNodeFactory.createBlockStatementNode(openBrace, member, closeBrace); endContext(); return stmt; default: ArrayList<STNode> stmts = new ArrayList<>(); stmts.add(member); STNode statements = parseStatements(stmts); closeBrace = parseCloseBrace(); endContext(); return STNodeFactory.createBlockStatementNode(openBrace, statements, closeBrace); 
}
    }

    /**
     * Parse the rest of the statement, treating the start as a mapping binding pattern.
     * Switches into assignment-statement context, collects the remaining fields of the
     * mapping binding pattern, and then parses the assignment rhs.
     *
     * @param openBrace Open brace
     * @param firstMappingField First member
     * @return Parsed node
     */
    private STNode parseStmtAsMappingBindingPatternStart(STNode openBrace, STNode firstMappingField) {
        switchContext(ParserRuleContext.ASSIGNMENT_STMT);
        startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
        List<STNode> bindingPatterns = new ArrayList<>();
        // A rest-binding-pattern is passed to parseMappingBindingPattern as the most
        // recently parsed member, so it is not added to the leading field list here.
        if (firstMappingField.kind != SyntaxKind.REST_BINDING_PATTERN) {
            bindingPatterns.add(getBindingPattern(firstMappingField));
        }
        STNode mappingBP = parseMappingBindingPattern(openBrace, bindingPatterns, firstMappingField);
        return parseAssignmentStmtRhs(mappingBP);
    }

    /**
     * Parse the rest of the statement, treating the start as a mapping constructor expression.
     * Switches into expression-statement context, completes the mapping constructor, then
     * parses any trailing expression rhs before finishing the statement.
     *
     * @param openBrace Open brace
     * @param firstMember First member
     * @return Parsed node
     */
    private STNode parseStmtAsMappingConstructorStart(STNode openBrace, STNode firstMember) {
        switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
        startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
        List<STNode> members = new ArrayList<>();
        STNode mappingCons = parseAsMappingConstructor(openBrace, members, firstMember);
        STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, mappingCons, false, true);
        return parseStatementStartWithExprRhs(expr);
    }

    /**
     * Parse the braced-list as a mapping constructor expression.
*
     * @param openBrace Open brace
     * @param members members list
     * @param member Most recently parsed member
     * @return Parsed node
     */
    private STNode parseAsMappingConstructor(STNode openBrace, List<STNode> members, STNode member) {
        members.add(member);
        // Convert previously-collected (possibly ambiguous) members into plain expressions.
        members = getExpressionList(members);
        switchContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
        STNode fields = parseMappingConstructorFields(members);
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
    }

    /**
     * Parse the rest of the statement, treating the start as a mapping binding pattern
     * or a mapping constructor expression.
     *
     * @param openBrace Open brace
     * @param member First member
     * @return Parsed node
     */
    private STNode parseStmtAsMappingBPOrMappingConsStart(STNode openBrace, STNode member) {
        startContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);
        List<STNode> members = new ArrayList<>();
        members.add(member);

        STNode bpOrConstructor;
        STNode memberEnd = parseMappingFieldEnd();
        if (memberEnd == null) {
            // No field separator follows: the braced list ends after the first member.
            STNode closeBrace = parseCloseBrace();
            bpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members, closeBrace);
        } else {
            members.add(memberEnd);
            bpOrConstructor = parseMappingBindingPatternOrMappingConstructor(openBrace, members);
        }

        switch (bpOrConstructor.kind) {
            case MAPPING_CONSTRUCTOR:
                // Unambiguously a mapping constructor: continue as an expression statement.
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
                STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, bpOrConstructor, false, true);
                return parseStatementStartWithExprRhs(expr);
            case MAPPING_BINDING_PATTERN:
                // Unambiguously a binding pattern: continue as an assignment statement.
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                STNode bindingPattern = getBindingPattern(bpOrConstructor);
                return parseAssignmentStmtRhs(bindingPattern);
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            default:
                // Still ambiguous: an `=` after the braced list means a destructuring assignment.
                if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                    switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                    bindingPattern = getBindingPattern(bpOrConstructor);
                    return
parseAssignmentStmtRhs(bindingPattern); } switchContext(ParserRuleContext.EXPRESSION_STATEMENT); expr = getExpression(bpOrConstructor); expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true); return parseStatementStartWithExprRhs(expr); } } /** * Parse a member of a braced-list that occurs at the start of a statement. * * @return Parsed node */ private STNode parseStatementStartingBracedListFirstMember() { STToken nextToken = peek(); switch (nextToken.kind) { case READONLY_KEYWORD: STNode readonlyKeyword = parseReadonlyKeyword(); return bracedListMemberStartsWithReadonly(readonlyKeyword); case IDENTIFIER_TOKEN: readonlyKeyword = STNodeFactory.createEmptyNode(); return parseIdentifierRhsInStmtStartingBrace(readonlyKeyword); case STRING_LITERAL_TOKEN: STNode key = parseStringLiteral(); if (peek().kind == SyntaxKind.COLON_TOKEN) { readonlyKeyword = STNodeFactory.createEmptyNode(); STNode colon = parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr); } switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, key, false, true); return parseStatementStartWithExprRhs(expr); case OPEN_BRACKET_TOKEN: STNode annots = STNodeFactory.createEmptyNodeList(); return parseStatementStartsWithOpenBracket(annots, true); case OPEN_BRACE_TOKEN: switchContext(ParserRuleContext.BLOCK_STMT); return parseStatementStartsWithOpenBrace(); case ELLIPSIS_TOKEN: return parseRestBindingPattern(); default: switchContext(ParserRuleContext.BLOCK_STMT); return parseStatements(); } } private STNode bracedListMemberStartsWithReadonly(STNode readonlyKeyword) { STToken nextToken = peek(); switch (nextToken.kind) { case IDENTIFIER_TOKEN: return parseIdentifierRhsInStmtStartingBrace(readonlyKeyword); case STRING_LITERAL_TOKEN: if (peek(2).kind == SyntaxKind.COLON_TOKEN) { STNode key = parseStringLiteral(); STNode colon = 
parseColon(); STNode valueExpr = parseExpression(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr); } default: switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.VAR_DECL_STMT); startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN); STNode typeDesc = parseComplexTypeDescriptor(readonlyKeyword, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true); endContext(); STNode annots = STNodeFactory.createEmptyNodeList(); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typedBP = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT); return parseVarDeclRhs(annots, finalKeyword, typedBP, false); } } /** * Parse the rhs components of an identifier that follows an open brace, * at the start of a statement. i.e: "{foo". * * @param readonlyKeyword Readonly keyword * @return Parsed node */ private STNode parseIdentifierRhsInStmtStartingBrace(STNode readonlyKeyword) { STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); switch (peek().kind) { case COMMA_TOKEN: STNode colon = STNodeFactory.createEmptyNode(); STNode value = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value); case COLON_TOKEN: colon = parseColon(); if (!isEmpty(readonlyKeyword)) { value = parseExpression(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, value); } switch (peek().kind) { case OPEN_BRACKET_TOKEN: STNode bindingPatternOrExpr = parseListBindingPatternOrListConstructor(); return getMappingField(identifier, colon, bindingPatternOrExpr); case OPEN_BRACE_TOKEN: bindingPatternOrExpr = parseMappingBindingPatterOrMappingConstructor(); return getMappingField(identifier, colon, bindingPatternOrExpr); case IDENTIFIER_TOKEN: return parseQualifiedIdentifierRhsInStmtStartBrace(identifier, colon); default: STNode expr = parseExpression(); return getMappingField(identifier, colon, expr); } 
default: switchContext(ParserRuleContext.BLOCK_STMT); if (!isEmpty(readonlyKeyword)) { startContext(ParserRuleContext.VAR_DECL_STMT); STNode bindingPattern = STNodeFactory.createCaptureBindingPatternNode(identifier); STNode typedBindingPattern = STNodeFactory.createTypedBindingPatternNode(readonlyKeyword, bindingPattern); STNode annots = STNodeFactory.createEmptyNodeList(); STNode finalKeyword = STNodeFactory.createEmptyNode(); return parseVarDeclRhs(annots, finalKeyword, typedBindingPattern, false); } startContext(ParserRuleContext.AMBIGUOUS_STMT); STNode qualifiedIdentifier = parseQualifiedIdentifier(identifier, false); STNode expr = parseTypedBindingPatternOrExprRhs(qualifiedIdentifier, true); STNode annots = STNodeFactory.createEmptyNodeList(); return parseStmtStartsWithTypedBPOrExprRhs(annots, expr); } } /** * Parse the rhs components of "<code>{ identifier : identifier</code>", * at the start of a statement. i.e: "{foo:bar". * * @return Parsed node */ private STNode parseQualifiedIdentifierRhsInStmtStartBrace(STNode identifier, STNode colon) { STNode secondIdentifier = parseIdentifier(ParserRuleContext.VARIABLE_REF); STNode secondNameRef = STNodeFactory.createSimpleNameReferenceNode(secondIdentifier); if (isWildcardBP(secondIdentifier)) { return getWildcardBindingPattern(secondIdentifier); } STNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondNameRef); switch (peek().kind) { case COMMA_TOKEN: return qualifiedNameRef; case OPEN_BRACE_TOKEN: case IDENTIFIER_TOKEN: STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(qualifiedNameRef, ParserRuleContext.VAR_DECL_STMT); STNode annots = STNodeFactory.createEmptyNodeList(); return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false); case OPEN_BRACKET_TOKEN: return parseMemberRhsInStmtStartWithBrace(identifier, colon, secondNameRef); case QUESTION_MARK_TOKEN: STNode typeDesc = 
parseComplexTypeDescriptor(qualifiedNameRef, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                finalKeyword = STNodeFactory.createEmptyNode();
                typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
                annots = STNodeFactory.createEmptyNodeList();
                return parseVarDeclRhs(annots, finalKeyword, typeBindingPattern, false);
            case EQUAL_TOKEN:
            case SEMICOLON_TOKEN:
                // `foo:bar =` / `foo:bar;` — a statement starting with the qualified name ref.
                return parseStatementStartWithExprRhs(qualifiedNameRef);
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
            default:
                return parseMemberWithExprInRhs(identifier, colon, secondNameRef, secondNameRef);
        }
    }

    /**
     * Classify the first member of a statement-starting braced list: does it force a
     * mapping binding pattern, a mapping constructor, a block statement, or leave the
     * braces ambiguous between binding pattern and constructor.
     *
     * @param member First parsed member of the braced list
     * @return One of <code>MAPPING_BINDING_PATTERN</code>, <code>MAPPING_CONSTRUCTOR</code>,
     *         <code>MAPPING_BP_OR_MAPPING_CONSTRUCTOR</code>, <code>BLOCK_STATEMENT</code>,
     *         or <code>NONE</code> when the member does not decide the shape
     */
    private SyntaxKind getBracedListType(STNode member) {
        switch (member.kind) {
            case FIELD_BINDING_PATTERN:
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.MAPPING_BINDING_PATTERN;
            case SPECIFIC_FIELD:
                STNode expr = ((STSpecificFieldNode) member).valueExpr;
                // A key-only field (`{foo,` / `{foo}`) fits both interpretations.
                if (expr == null) {
                    return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;
                }
                switch (expr.kind) {
                    case SIMPLE_NAME_REFERENCE:
                    case LIST_BP_OR_LIST_CONSTRUCTOR:
                    case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                        return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;
                    case ERROR_BINDING_PATTERN:
                        return SyntaxKind.MAPPING_BINDING_PATTERN;
                    case FUNCTION_CALL:
                        // A call whose args all look like binding patterns may still turn
                        // out to be an error binding pattern, so stay ambiguous.
                        if (isPossibleErrorBindingPattern((STFunctionCallExpressionNode) expr)) {
                            return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;
                        }
                        return SyntaxKind.MAPPING_CONSTRUCTOR;
                    default:
                        return SyntaxKind.MAPPING_CONSTRUCTOR;
                }
            case SPREAD_FIELD:
            case COMPUTED_NAME_FIELD:
                return SyntaxKind.MAPPING_CONSTRUCTOR;
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case LIST_BP_OR_LIST_CONSTRUCTOR:
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            case REST_BINDING_PATTERN:
                return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;
            case LIST:
                return SyntaxKind.BLOCK_STATEMENT;
            default:
                return SyntaxKind.NONE;
        }
    }

    /**
     * Parse mapping binding pattern or mapping constructor.
*
     * @return Parsed node
     */
    private STNode parseMappingBindingPatterOrMappingConstructor() {
        startContext(ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR);
        STNode openBrace = parseOpenBrace();
        List<STNode> memberList = new ArrayList<>();
        return parseMappingBindingPatternOrMappingConstructor(openBrace, memberList);
    }

    /**
     * Check whether the next token terminates a braced list (close-brace or EOF).
     *
     * @param nextTokenKind Kind of the next token
     * @return <code>true</code> if the braced list ends here
     */
    private boolean isBracedListEnd(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Parse the members of a braced list that may be a mapping binding pattern or a
     * mapping constructor. As soon as a member classifies as one or the other, parsing is
     * delegated to the corresponding unambiguous path; otherwise members accumulate in the
     * ambiguous member list until the close brace.
     *
     * @param openBrace Open brace of the list
     * @param memberList Members (and separators) parsed so far
     * @return Parsed node
     */
    private STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List<STNode> memberList) {
        STToken nextToken = peek();
        while (!isBracedListEnd(nextToken.kind)) {
            STNode member = parseMappingBindingPatterOrMappingConstructorMember();
            SyntaxKind currentNodeType = getTypeOfMappingBPOrMappingCons(member);
            switch (currentNodeType) {
                case MAPPING_CONSTRUCTOR:
                    return parseAsMappingConstructor(openBrace, memberList, member);
                case MAPPING_BINDING_PATTERN:
                    return parseAsMappingBindingPattern(openBrace, memberList, member);
                case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                default:
                    memberList.add(member);
                    break;
            }

            STNode memberEnd = parseMappingFieldEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
            nextToken = peek();
        }

        // Reached the end without resolving the ambiguity.
        STNode closeBrace = parseCloseBrace();
        return parseMappingBindingPatternOrMappingConstructor(openBrace, memberList, closeBrace);
    }

    /**
     * Parse a single member of a braced list that is still ambiguous between a mapping
     * binding pattern and a mapping constructor.
     *
     * @return Parsed member node
     */
    private STNode parseMappingBindingPatterOrMappingConstructorMember() {
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);
                return parseMappingFieldRhs(key);
            case STRING_LITERAL_TOKEN:
                // A string-literal key can only be a mapping constructor specific-field.
                STNode readonlyKeyword = STNodeFactory.createEmptyNode();
                key = parseStringLiteral();
                STNode colon = parseColon();
                STNode valueExpr = parseExpression();
                return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
            case OPEN_BRACKET_TOKEN:
                return parseComputedField();
            case ELLIPSIS_TOKEN:
                STNode ellipsis = parseEllipsis();
                STNode expr = parseExpression();
                if
(expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
                    // `...name` — treat as a rest binding pattern.
                    return STNodeFactory.createRestBindingPatternNode(ellipsis, expr);
                }
                // `...expr` — a spread field of a mapping constructor.
                return STNodeFactory.createSpreadFieldNode(ellipsis, expr);
            default:
                recover(peek(), ParserRuleContext.MAPPING_BP_OR_MAPPING_CONSTRUCTOR_MEMBER);
                return parseMappingBindingPatterOrMappingConstructorMember();
        }
    }

    /**
     * Parse what follows a field key in an ambiguous braced list.
     *
     * @param key Parsed field key
     * @return A field-binding-pattern or a specific-field node
     */
    private STNode parseMappingFieldRhs(STNode key) {
        STNode colon;
        STNode valueExpr;
        switch (peek().kind) {
            case COLON_TOKEN:
                colon = parseColon();
                return parseMappingFieldValue(key, colon);
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // Key-only field: `{foo,` or `{foo}` — colon and value stay empty.
                STNode readonlyKeyword = STNodeFactory.createEmptyNode();
                colon = STNodeFactory.createEmptyNode();
                valueExpr = STNodeFactory.createEmptyNode();
                return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, valueExpr);
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.FIELD_BINDING_PATTERN_END, key);
                readonlyKeyword = STNodeFactory.createEmptyNode();
                return parseSpecificFieldRhs(readonlyKeyword, key);
        }
    }

    /**
     * Parse the value that follows <code>key:</code> in an ambiguous braced list. The value
     * itself may again be ambiguous (a nested bracketed or braced list).
     *
     * @param key Field key
     * @param colon Colon token
     * @return A field-binding-pattern node when the value parsed as a binding pattern;
     *         otherwise a specific-field node
     */
    private STNode parseMappingFieldValue(STNode key, STNode colon) {
        STNode expr;
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                expr = parseExpression();
                break;
            case OPEN_BRACKET_TOKEN:
                expr = parseListBindingPatternOrListConstructor();
                break;
            case OPEN_BRACE_TOKEN:
                expr = parseMappingBindingPatterOrMappingConstructor();
                break;
            default:
                expr = parseExpression();
                break;
        }

        if (isBindingPattern(expr.kind)) {
            return STNodeFactory.createFieldBindingPatternFullNode(key, colon, expr);
        }

        STNode readonlyKeyword = STNodeFactory.createEmptyNode();
        return STNodeFactory.createSpecificFieldNode(readonlyKeyword, key, colon, expr);
    }

    /**
     * Check whether a node kind is a binding-pattern kind.
     *
     * @param kind Node kind to check
     * @return <code>true</code> for binding-pattern kinds
     */
    private boolean isBindingPattern(SyntaxKind kind) {
        switch (kind) {
            case FIELD_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Classify a parsed member of an ambiguous braced list as mapping binding pattern,
     * mapping constructor, or still ambiguous.
     *
     * @param memberNode Parsed member
     * @return Classification of the member
     */
    private SyntaxKind getTypeOfMappingBPOrMappingCons(STNode memberNode) {
        switch (memberNode.kind) {
            case
FIELD_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.MAPPING_BINDING_PATTERN;
            case SPECIFIC_FIELD:
                STNode expr = ((STSpecificFieldNode) memberNode).valueExpr;
                // A missing value, a bare name, or a still-ambiguous nested list keeps the
                // braced list ambiguous; any other value forces a mapping constructor.
                if (expr == null || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
                        expr.kind == SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR ||
                        expr.kind == SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR) {
                    return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;
                }
                return SyntaxKind.MAPPING_CONSTRUCTOR;
            case SPREAD_FIELD:
            case COMPUTED_NAME_FIELD:
                return SyntaxKind.MAPPING_CONSTRUCTOR;
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
            case LIST_BP_OR_LIST_CONSTRUCTOR:
            case REST_BINDING_PATTERN:
            default:
                return SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR;
        }
    }

    /**
     * Finish a braced list that stayed ambiguous to its close brace: wrap the members in an
     * ambiguous collection node and let the caller decide the interpretation later.
     *
     * @param openBrace Open brace
     * @param members Parsed members, including separators
     * @param closeBrace Close brace
     * @return Ambiguous collection node of kind <code>MAPPING_BP_OR_MAPPING_CONSTRUCTOR</code>
     */
    private STNode parseMappingBindingPatternOrMappingConstructor(STNode openBrace, List<STNode> members,
                                                                  STNode closeBrace) {
        endContext();
        return new STAmbiguousCollectionNode(SyntaxKind.MAPPING_BP_OR_MAPPING_CONSTRUCTOR, openBrace, members,
                closeBrace);
    }

    /**
     * Continue parsing the braced list as a mapping binding pattern: convert the members
     * collected so far into binding patterns and switch into the unambiguous context.
     *
     * @param openBrace Open brace
     * @param members Members parsed so far
     * @param member Member that decided the interpretation
     * @return Parsed mapping binding pattern
     */
    private STNode parseAsMappingBindingPattern(STNode openBrace, List<STNode> members, STNode member) {
        members.add(member);
        members = getBindingPatternsList(members);
        switchContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
        return parseMappingBindingPattern(openBrace, members, member);
    }

    /**
     * Parse list binding pattern or list constructor.
* * @return Parsed node */ private STNode parseListBindingPatternOrListConstructor() { startContext(ParserRuleContext.BRACKETED_LIST); STNode openBracket = parseOpenBracket(); List<STNode> memberList = new ArrayList<>(); return parseListBindingPatternOrListConstructor(openBracket, memberList, false); } private STNode parseListBindingPatternOrListConstructor(STNode openBracket, List<STNode> memberList, boolean isRoot) { STToken nextToken = peek(); while (!isBracketedListEnd(nextToken.kind)) { STNode member = parseListBindingPatternOrListConstructorMember(); SyntaxKind currentNodeType = getParsingNodeTypeOfListBPOrListCons(member); switch (currentNodeType) { case LIST_CONSTRUCTOR: return parseAsListConstructor(openBracket, memberList, member, isRoot); case LIST_BINDING_PATTERN: return parseAsListBindingPattern(openBracket, memberList, member, isRoot); case LIST_BP_OR_LIST_CONSTRUCTOR: default: memberList.add(member); break; } STNode memberEnd = parseBracketedListMemberEnd(); if (memberEnd == null) { break; } memberList.add(memberEnd); nextToken = peek(); } STNode closeBracket = parseCloseBracket(); return parseListBindingPatternOrListConstructor(openBracket, memberList, closeBracket, isRoot); } private STNode parseListBindingPatternOrListConstructorMember() { STToken nextToken = peek(); switch (nextToken.kind) { case OPEN_BRACKET_TOKEN: return parseListBindingPatternOrListConstructor(); case IDENTIFIER_TOKEN: STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF); if (isWildcardBP(identifier)) { return getWildcardBindingPattern(identifier); } return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false); case OPEN_BRACE_TOKEN: return parseMappingBindingPatterOrMappingConstructor(); case ELLIPSIS_TOKEN: return parseListBindingPatternMember(); default: if (isValidExpressionStart(nextToken.kind, 1)) { return parseExpression(); } recover(peek(), ParserRuleContext.LIST_BP_OR_LIST_CONSTRUCTOR_MEMBER); return 
parseListBindingPatternOrListConstructorMember(); } } private SyntaxKind getParsingNodeTypeOfListBPOrListCons(STNode memberNode) { switch (memberNode.kind) { case CAPTURE_BINDING_PATTERN: case LIST_BINDING_PATTERN: case REST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: case WILDCARD_BINDING_PATTERN: return SyntaxKind.LIST_BINDING_PATTERN; case SIMPLE_NAME_REFERENCE: case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: return SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR; default: return SyntaxKind.LIST_CONSTRUCTOR; } } private STNode parseAsListConstructor(STNode openBracket, List<STNode> memberList, STNode member, boolean isRoot) { memberList.add(member); memberList = getExpressionList(memberList); switchContext(ParserRuleContext.LIST_CONSTRUCTOR); STNode expressions = parseOptionalExpressionsList(memberList); STNode closeBracket = parseCloseBracket(); STNode listConstructor = STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket); endContext(); STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, listConstructor, false, false); if (!isRoot) { return expr; } return parseStatementStartWithExprRhs(expr); } private STNode parseListBindingPatternOrListConstructor(STNode openBracket, List<STNode> members, STNode closeBracket, boolean isRoot) { STNode lbpOrListCons; switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: case CLOSE_BRACKET_TOKEN: if (!isRoot) { endContext(); return new STAmbiguousCollectionNode(SyntaxKind.LIST_BP_OR_LIST_CONSTRUCTOR, openBracket, members, closeBracket); } default: if (isValidExprRhsStart(peek().kind, closeBracket.kind)) { members = getExpressionList(members); STNode memberExpressions = STNodeFactory.createNodeList(members); lbpOrListCons = STNodeFactory.createListConstructorExpressionNode(openBracket, memberExpressions, closeBracket); break; } members = getBindingPatternsList(members); STNode bindingPatternsNode = STNodeFactory.createNodeList(members); STNode 
restBindingPattern = STNodeFactory.createEmptyNode(); lbpOrListCons = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern, closeBracket); break; } endContext(); if (!isRoot) { return lbpOrListCons; } return parseStmtStartsWithTypedBPOrExprRhs(null, lbpOrListCons); } private STNode parseMemberRhsInStmtStartWithBrace(STNode identifier, STNode colon, STNode secondIdentifier) { STNode typedBPOrExpr = parseTypedBindingPatternOrMemberAccess(secondIdentifier, false, true, ParserRuleContext.AMBIGUOUS_STMT); if (isExpression(typedBPOrExpr.kind)) { return parseMemberWithExprInRhs(identifier, colon, secondIdentifier, typedBPOrExpr); } switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.VAR_DECL_STMT); STNode finalKeyword = STNodeFactory.createEmptyNode(); STNode annots = STNodeFactory.createEmptyNodeList(); STNode qualifiedNameRef = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier); STNode typeDesc = mergeQualifiedNameWithTypeDesc(qualifiedNameRef, ((STTypedBindingPatternNode) typedBPOrExpr).typeDescriptor); return parseVarDeclRhs(annots, finalKeyword, typeDesc, false); } /** * Parse a member that starts with "foo:bar[", in a statement starting with a brace. 
* * @param identifier First identifier of the statement * @param colon Colon that follows the first identifier * @param secondIdentifier Identifier that follows the colon * @param memberAccessExpr Member access expression * @return Parsed node */ private STNode parseMemberWithExprInRhs(STNode identifier, STNode colon, STNode secondIdentifier, STNode memberAccessExpr) { STNode expr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, true); switch (peek().kind) { case COMMA_TOKEN: case CLOSE_BRACE_TOKEN: switchContext(ParserRuleContext.EXPRESSION_STATEMENT); startContext(ParserRuleContext.MAPPING_CONSTRUCTOR); STNode readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, expr); case EQUAL_TOKEN: case SEMICOLON_TOKEN: default: switchContext(ParserRuleContext.BLOCK_STMT); startContext(ParserRuleContext.EXPRESSION_STATEMENT); STNode qualifiedName = STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, secondIdentifier); STNode updatedExpr = mergeQualifiedNameWithExpr(qualifiedName, expr); return parseStatementStartWithExprRhs(updatedExpr); } } /** * Replace the first identifier of an expression, with a given qualified-identifier. * Only expressions that can start with "bar[..]" can reach here. 
* * @param qualifiedName Qualified identifier to replace simple identifier * @param exprOrAction Expression or action * @return Updated expression */ private STNode mergeQualifiedNameWithExpr(STNode qualifiedName, STNode exprOrAction) { switch (exprOrAction.kind) { case SIMPLE_NAME_REFERENCE: return qualifiedName; case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) exprOrAction; STNode newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, binaryExpr.lhsExpr); return STNodeFactory.createBinaryExpressionNode(binaryExpr.kind, newLhsExpr, binaryExpr.operator, binaryExpr.rhsExpr); case FIELD_ACCESS: STFieldAccessExpressionNode fieldAccess = (STFieldAccessExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, fieldAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, fieldAccess.dotToken, fieldAccess.fieldName); case INDEXED_EXPRESSION: STIndexedExpressionNode memberAccess = (STIndexedExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, memberAccess.containerExpression); return STNodeFactory.createIndexedExpressionNode(newLhsExpr, memberAccess.openBracket, memberAccess.keyExpression, memberAccess.closeBracket); case TYPE_TEST_EXPRESSION: STTypeTestExpressionNode typeTest = (STTypeTestExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, typeTest.expression); return STNodeFactory.createTypeTestExpressionNode(newLhsExpr, typeTest.isKeyword, typeTest.typeDescriptor); case ANNOT_ACCESS: STAnnotAccessExpressionNode annotAccess = (STAnnotAccessExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, annotAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, annotAccess.annotChainingToken, annotAccess.annotTagReference); case OPTIONAL_FIELD_ACCESS: STOptionalFieldAccessExpressionNode optionalFieldAccess = (STOptionalFieldAccessExpressionNode) exprOrAction; 
newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, optionalFieldAccess.expression); return STNodeFactory.createFieldAccessExpressionNode(newLhsExpr, optionalFieldAccess.optionalChainingToken, optionalFieldAccess.fieldName); case CONDITIONAL_EXPRESSION: STConditionalExpressionNode conditionalExpr = (STConditionalExpressionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, conditionalExpr.lhsExpression); return STNodeFactory.createConditionalExpressionNode(newLhsExpr, conditionalExpr.questionMarkToken, conditionalExpr.middleExpression, conditionalExpr.colonToken, conditionalExpr.endExpression); case REMOTE_METHOD_CALL_ACTION: STRemoteMethodCallActionNode remoteCall = (STRemoteMethodCallActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, remoteCall.expression); return STNodeFactory.createRemoteMethodCallActionNode(newLhsExpr, remoteCall.rightArrowToken, remoteCall.methodName, remoteCall.openParenToken, remoteCall.arguments, remoteCall.closeParenToken); case ASYNC_SEND_ACTION: STAsyncSendActionNode asyncSend = (STAsyncSendActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, asyncSend.expression); return STNodeFactory.createAsyncSendActionNode(newLhsExpr, asyncSend.rightArrowToken, asyncSend.peerWorker); case SYNC_SEND_ACTION: STSyncSendActionNode syncSend = (STSyncSendActionNode) exprOrAction; newLhsExpr = mergeQualifiedNameWithExpr(qualifiedName, syncSend.expression); return STNodeFactory.createAsyncSendActionNode(newLhsExpr, syncSend.syncSendToken, syncSend.peerWorker); default: return exprOrAction; } } private STNode mergeQualifiedNameWithTypeDesc(STNode qualifiedName, STNode typeDesc) { switch (typeDesc.kind) { case SIMPLE_NAME_REFERENCE: return qualifiedName; case ARRAY_TYPE_DESC: STArrayTypeDescriptorNode arrayTypeDesc = (STArrayTypeDescriptorNode) typeDesc; STNode newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, arrayTypeDesc.memberTypeDesc); return 
createArrayTypeDesc(newMemberType, arrayTypeDesc.openBracket, arrayTypeDesc.arrayLength, arrayTypeDesc.closeBracket); case UNION_TYPE_DESC: STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDesc; STNode newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, unionTypeDesc.leftTypeDesc); return createUnionTypeDesc(newlhsType, unionTypeDesc.pipeToken, unionTypeDesc.rightTypeDesc); case INTERSECTION_TYPE_DESC: STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDesc; newlhsType = mergeQualifiedNameWithTypeDesc(qualifiedName, intersectionTypeDesc.leftTypeDesc); return createUnionTypeDesc(newlhsType, intersectionTypeDesc.bitwiseAndToken, intersectionTypeDesc.rightTypeDesc); case OPTIONAL_TYPE_DESC: STOptionalTypeDescriptorNode optionalType = (STOptionalTypeDescriptorNode) typeDesc; newMemberType = mergeQualifiedNameWithTypeDesc(qualifiedName, optionalType.typeDescriptor); return STNodeFactory.createOptionalTypeDescriptorNode(newMemberType, optionalType.questionMarkToken); default: return typeDesc; } } private List<STNode> getTypeDescList(List<STNode> ambiguousList) { List<STNode> typeDescList = new ArrayList<>(); for (STNode item : ambiguousList) { typeDescList.add(getTypeDescFromExpr(item)); } return typeDescList; } /** * Create a type-desc out of an expression. 
* * @param expression Expression * @return Type descriptor */ private STNode getTypeDescFromExpr(STNode expression) { switch (expression.kind) { case INDEXED_EXPRESSION: return parseArrayTypeDescriptorNode((STIndexedExpressionNode) expression); case NUMERIC_LITERAL: case BOOLEAN_LITERAL: case STRING_LITERAL: case NULL_LITERAL: return STNodeFactory.createSingletonTypeDescriptorNode(expression); case TYPE_REFERENCE_TYPE_DESC: return ((STTypeReferenceTypeDescNode) expression).typeRef; case BRACED_EXPRESSION: STBracedExpressionNode bracedExpr = (STBracedExpressionNode) expression; STNode typeDesc = getTypeDescFromExpr(bracedExpr.expression); return STNodeFactory.createParenthesisedTypeDescriptorNode(bracedExpr.openParen, typeDesc, bracedExpr.closeParen); case NIL_LITERAL: STNilLiteralNode nilLiteral = (STNilLiteralNode) expression; return STNodeFactory.createNilTypeDescriptorNode(nilLiteral.openParenToken, nilLiteral.closeParenToken); case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) expression; STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(innerList.members)); return STNodeFactory.createTupleTypeDescriptorNode(innerList.collectionStartToken, memberTypeDescs, innerList.collectionEndToken); case BINARY_EXPRESSION: STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) expression; switch (binaryExpr.operator.kind) { case PIPE_TOKEN: STNode lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr); STNode rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr); return createUnionTypeDesc(lhsTypeDesc, binaryExpr.operator, rhsTypeDesc); case BITWISE_AND_TOKEN: lhsTypeDesc = getTypeDescFromExpr(binaryExpr.lhsExpr); rhsTypeDesc = getTypeDescFromExpr(binaryExpr.rhsExpr); return createIntersectionTypeDesc(lhsTypeDesc, binaryExpr.operator, rhsTypeDesc); default: break; } return expression; case UNARY_EXPRESSION: return STNodeFactory.createSingletonTypeDescriptorNode(expression); case 
SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return expression; } } private List<STNode> getBindingPatternsList(List<STNode> ambibuousList) { List<STNode> bindingPatterns = new ArrayList<STNode>(); for (STNode item : ambibuousList) { bindingPatterns.add(getBindingPattern(item)); } return bindingPatterns; } private STNode getBindingPattern(STNode ambiguousNode) { if (isEmpty(ambiguousNode)) { return ambiguousNode; } switch (ambiguousNode.kind) { case SIMPLE_NAME_REFERENCE: STNode varName = ((STSimpleNameReferenceNode) ambiguousNode).name; return createCaptureOrWildcardBP(varName); case QUALIFIED_NAME_REFERENCE: STQualifiedNameReferenceNode qualifiedName = (STQualifiedNameReferenceNode) ambiguousNode; STNode fieldName = STNodeFactory.createSimpleNameReferenceNode(qualifiedName.modulePrefix); return STNodeFactory.createFieldBindingPatternFullNode(fieldName, qualifiedName.colon, getBindingPattern(qualifiedName.identifier)); case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode; STNode memberBindingPatterns = STNodeFactory.createNodeList(getBindingPatternsList(innerList.members)); STNode restBindingPattern = STNodeFactory.createEmptyNode(); return STNodeFactory.createListBindingPatternNode(innerList.collectionStartToken, memberBindingPatterns, restBindingPattern, innerList.collectionEndToken); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: innerList = (STAmbiguousCollectionNode) ambiguousNode; List<STNode> bindingPatterns = new ArrayList<>(); restBindingPattern = STNodeFactory.createEmptyNode(); for (int i = 0; i < innerList.members.size(); i++) { STNode bp = getBindingPattern(innerList.members.get(i)); if (bp.kind == SyntaxKind.REST_BINDING_PATTERN) { restBindingPattern = bp; break; } bindingPatterns.add(bp); } memberBindingPatterns = STNodeFactory.createNodeList(bindingPatterns); return STNodeFactory.createMappingBindingPatternNode(innerList.collectionStartToken, 
memberBindingPatterns, restBindingPattern, innerList.collectionEndToken); case SPECIFIC_FIELD: STSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode; fieldName = STNodeFactory.createSimpleNameReferenceNode(field.fieldName); if (field.valueExpr == null) { return STNodeFactory.createFieldBindingPatternVarnameNode(fieldName); } return STNodeFactory.createFieldBindingPatternFullNode(fieldName, field.colon, getBindingPattern(field.valueExpr)); case FUNCTION_CALL: STFunctionCallExpressionNode funcCall = (STFunctionCallExpressionNode) ambiguousNode; STNode args = funcCall.arguments; int size = args.bucketCount(); bindingPatterns = new ArrayList<>(); for (int i = 0; i < size; i++) { STNode arg = args.childInBucket(i); bindingPatterns.add(getBindingPattern(arg)); } STNode argListBindingPatterns = STNodeFactory.createNodeList(bindingPatterns); STNode errorKeyword; STNode typeRef; if (funcCall.functionName.kind == SyntaxKind.ERROR_TYPE_DESC) { errorKeyword = funcCall.functionName; typeRef = STNodeFactory.createEmptyNode(); } else { errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD); typeRef = funcCall.functionName; } return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, funcCall.openParenToken, argListBindingPatterns, funcCall.closeParenToken); case POSITIONAL_ARG: STPositionalArgumentNode positionalArg = (STPositionalArgumentNode) ambiguousNode; return getBindingPattern(positionalArg.expression); case NAMED_ARG: STNamedArgumentNode namedArg = (STNamedArgumentNode) ambiguousNode; return STNodeFactory.createNamedArgBindingPatternNode(namedArg.argumentName, namedArg.equalsToken, getBindingPattern(namedArg.expression)); case REST_ARG: STRestArgumentNode restArg = (STRestArgumentNode) ambiguousNode; return STNodeFactory.createRestBindingPatternNode(restArg.ellipsis, restArg.expression); default: return ambiguousNode; } } private List<STNode> getExpressionList(List<STNode> ambibuousList) { List<STNode> exprList = 
new ArrayList<STNode>(); for (STNode item : ambibuousList) { exprList.add(getExpression(item)); } return exprList; } private STNode getExpression(STNode ambiguousNode) { if (isEmpty(ambiguousNode)) { return ambiguousNode; } switch (ambiguousNode.kind) { case BRACKETED_LIST: case LIST_BP_OR_LIST_CONSTRUCTOR: case TUPLE_TYPE_DESC_OR_LIST_CONST: STAmbiguousCollectionNode innerList = (STAmbiguousCollectionNode) ambiguousNode; STNode memberExprs = STNodeFactory.createNodeList(getExpressionList(innerList.members)); return STNodeFactory.createListConstructorExpressionNode(innerList.collectionStartToken, memberExprs, innerList.collectionEndToken); case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: innerList = (STAmbiguousCollectionNode) ambiguousNode; List<STNode> fieldList = new ArrayList<>(); for (int i = 0; i < innerList.members.size(); i++) { STNode field = innerList.members.get(i); STNode fieldNode; if (field.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) { STQualifiedNameReferenceNode qualifiedNameRefNode = (STQualifiedNameReferenceNode) field; STNode readOnlyKeyword = STNodeFactory.createEmptyNode(); STNode fieldName = qualifiedNameRefNode.modulePrefix; STNode colon = qualifiedNameRefNode.colon; STNode valueExpr = getExpression(qualifiedNameRefNode.identifier); fieldNode = STNodeFactory.createSpecificFieldNode(readOnlyKeyword, fieldName, colon, valueExpr); } else { fieldNode = getExpression(field); } fieldList.add(fieldNode); } STNode fields = STNodeFactory.createNodeList(fieldList); return STNodeFactory.createMappingConstructorExpressionNode(innerList.collectionStartToken, fields, innerList.collectionEndToken); case REST_BINDING_PATTERN: STRestBindingPatternNode restBindingPattern = (STRestBindingPatternNode) ambiguousNode; return STNodeFactory.createSpreadFieldNode(restBindingPattern.ellipsisToken, restBindingPattern.variableName); case SPECIFIC_FIELD: STSpecificFieldNode field = (STSpecificFieldNode) ambiguousNode; return 
STNodeFactory.createSpecificFieldNode(field.readonlyKeyword, field.fieldName, field.colon, getExpression(field.valueExpr)); case SIMPLE_NAME_REFERENCE: case QUALIFIED_NAME_REFERENCE: default: return ambiguousNode; } } private STNode getMappingField(STNode identifier, STNode colon, STNode bindingPatternOrExpr) { STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier); switch (bindingPatternOrExpr.kind) { case LIST_BINDING_PATTERN: case MAPPING_BINDING_PATTERN: return STNodeFactory.createFieldBindingPatternFullNode(simpleNameRef, colon, bindingPatternOrExpr); case LIST_CONSTRUCTOR: case MAPPING_CONSTRUCTOR: STNode readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, simpleNameRef, colon, identifier); case LIST_BP_OR_LIST_CONSTRUCTOR: case MAPPING_BP_OR_MAPPING_CONSTRUCTOR: default: readonlyKeyword = STNodeFactory.createEmptyNode(); return STNodeFactory.createSpecificFieldNode(readonlyKeyword, identifier, colon, bindingPatternOrExpr); } } }
class member, object member or object member descriptor.
 * </p>
 * <code>
 * class-member := object-field | method-defn | object-type-inclusion
 * <br/>
 * object-member := object-field | method-defn
 * <br/>
 * object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
 * </code>
 *
 * @param context Parsing context of the object member
 * @return Parsed node, or {@code null} when the end of the object body is reached
 */
private STNode parseObjectMember(ParserRuleContext context) {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // End of the object/class body: no more members to parse.
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case RESOURCE_KEYWORD:
            // Member starts directly with a qualifier/keyword token, so there is
            // no leading metadata (doc string / annotations) to parse.
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            // Documentation string and/or annotations precede the member.
            metadata = parseMetaData();
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                // An object field beginning with its type descriptor: no metadata.
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            // Unexpected token: recover using the context-appropriate rule and
            // retry parsing the member from scratch.
            ParserRuleContext recoveryCtx;
            if (context == ParserRuleContext.OBJECT_MEMBER) {
                recoveryCtx = ParserRuleContext.OBJECT_MEMBER_START;
            } else {
                recoveryCtx = ParserRuleContext.CLASS_MEMBER_START;
            }
            recover(peek(), recoveryCtx);
            return parseObjectMember(context);
    }
    return parseObjectMemberWithoutMeta(metadata, context);
}
Because `or expansion` could generate a new CTE, it may break the assumption that the CTE anchor is always at the top of the plan in the rewritten CTE children.
/**
 * Wraps the given per-CTE-child rewrite jobs into the whole-plan-tree rewrite
 * pipeline: CTE anchors are pulled up and CTEs inlined first, then each CTE
 * child sub-tree is rewritten with {@code jobs}, and finally whole-plan checks
 * run.
 *
 * @param jobs rewrite jobs applied to each CTE child sub-tree
 * @return the ordered whole-tree rewrite job list
 */
private static List<RewriteJob> getWholeTreeRewriteJobs(List<RewriteJob> jobs) {
    return jobs(
            topic("cte inline and pull up all cte anchor",
                    custom(RuleType.PULL_UP_CTE_ANCHOR, PullUpCteAnchor::new),
                    custom(RuleType.CTE_INLINE, CTEInline::new)
            ),
            topic("process limit session variables",
                    custom(RuleType.ADD_DEFAULT_LIMIT, AddDefaultLimit::new)
            ),
            topic("rewrite cte sub-tree",
                    custom(RuleType.REWRITE_CTE_CHILDREN, () -> new RewriteCteChildren(jobs))
            ),
            // NOTE(review): OrExpansion can generate new CTEs; running it after the
            // CTE children have been rewritten may break the assumption that a CTE
            // anchor is always at the top of the rewritten CTE child plan — confirm
            // this placement.
            topic("or expansion",
                    topDown(new OrExpansion())),
            topic("whole plan check",
                    custom(RuleType.ADJUST_NULLABLE, AdjustNullable::new)
            )
    );
}
topic("or expansion",
/**
 * Wraps the given per-CTE-child rewrite jobs into the whole-plan-tree rewrite
 * pipeline: CTE anchors are pulled up and CTEs inlined first, then each CTE
 * child sub-tree is rewritten with {@code jobs}, and finally whole-plan checks
 * run.
 *
 * @param jobs rewrite jobs applied to each CTE child sub-tree
 * @return the ordered whole-tree rewrite job list
 */
private static List<RewriteJob> getWholeTreeRewriteJobs(List<RewriteJob> jobs) {
    return jobs(
            topic("cte inline and pull up all cte anchor",
                    custom(RuleType.PULL_UP_CTE_ANCHOR, PullUpCteAnchor::new),
                    custom(RuleType.CTE_INLINE, CTEInline::new)
            ),
            topic("process limit session variables",
                    custom(RuleType.ADD_DEFAULT_LIMIT, AddDefaultLimit::new)
            ),
            topic("rewrite cte sub-tree",
                    custom(RuleType.REWRITE_CTE_CHILDREN, () -> new RewriteCteChildren(jobs))
            ),
            // NOTE(review): OrExpansion can generate new CTEs; running it after the
            // CTE children have been rewritten may break the assumption that a CTE
            // anchor is always at the top of the rewritten CTE child plan — confirm
            // this placement.
            topic("or expansion",
                    topDown(new OrExpansion())),
            topic("whole plan check",
                    custom(RuleType.ADJUST_NULLABLE, AdjustNullable::new)
            )
    );
}
/**
 * Batch job executor holding the Nereids rewrite-phase rule pipelines.
 * <p>
 * {@code CTE_CHILDREN_REWRITE_JOBS} is the ordered job list applied to each CTE
 * child plan; the whole-tree pipelines reuse that list (optionally without its
 * cost-based entries) via {@code getWholeTreeRewriteJobs}.
 */
class Rewriter extends AbstractBatchJobExecutor {

    // Ordered rewrite jobs applied to every CTE child plan (and, minus the
    // cost-based entries, reused for the whole-tree pipelines).
    private static final List<RewriteJob> CTE_CHILDREN_REWRITE_JOBS = jobs(
            topic("Plan Normalization",
                    topDown(
                            new EliminateOrderByConstant(),
                            new EliminateSortUnderSubquery(),
                            new EliminateGroupByConstant(),
                            new LogicalSubQueryAliasToLogicalProject(),
                            new ExpressionNormalization(),
                            new ExpressionOptimization(),
                            new AvgDistinctToSumDivCount(),
                            new CountDistinctRewrite(),
                            new ExtractFilterFromCrossJoin()
                    ),
                    topDown(
                            new ExtractSingleTableExpressionFromDisjunction()
                    )
            ),
            topic("Subquery unnesting",
                    bottomUp(new PullUpProjectUnderApply()),
                    topDown(new PushDownFilterThroughProject()),
                    custom(RuleType.AGG_SCALAR_SUBQUERY_TO_WINDOW_FUNCTION, AggScalarSubQueryToWindowFunction::new),
                    bottomUp(
                            new EliminateUselessPlanUnderApply(),
                            new MergeProjects(),
                            /*
                             * Subquery unnesting.
                             * 1. Adjust the plan in correlated logicalApply
                             *    so that there are no correlated columns in the subquery.
                             * 2. Convert logicalApply to a logicalJoin.
                             * TODO: group these rules to make sure the result plan is what we expected.
                             */
                            new CorrelateApplyToUnCorrelateApply(),
                            new ApplyToJoin()
                    )
            ),
            topic("Eliminate optimization",
                    bottomUp(
                            new EliminateLimit(),
                            new EliminateFilter(),
                            new EliminateAggregate(),
                            new EliminateJoinCondition(),
                            new EliminateAssertNumRows()
                    )
            ),
            topDown(new AdjustAggregateNullableForEmptySet()),
            topDown(
                    new SimplifyAggGroupBy(),
                    new NormalizeAggregate(),
                    new CountLiteralRewrite(),
                    new NormalizeSort()
            ),
            topic("Window analysis",
                    topDown(
                            new ExtractAndNormalizeWindowExpression(),
                            new CheckAndStandardizeWindowFunctionAndFrame()
                    )
            ),
            topic("Rewrite join",
                    topDown(
                            new InferAggNotNull(),
                            new InferFilterNotNull(),
                            new InferJoinNotNull()
                    ),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    topDown(
                            new MergeFilters(),
                            new ReorderJoin(),
                            new PushFilterInsideJoin(),
                            new FindHashConditionForJoin(),
                            new ConvertInnerOrCrossJoin(),
                            new EliminateNullAwareLeftAntiJoin()
                    ),
                    bottomUp(
                            new JoinCommute(),
                            new TransposeSemiJoinLogicalJoin(),
                            new TransposeSemiJoinLogicalJoinProject(),
                            new TransposeSemiJoinAgg(),
                            new TransposeSemiJoinAggProject()
                    ),
                    topDown(
                            new EliminateDedupJoinCondition()
                    ),
                    bottomUp(new EliminateNotNull()),
                    topDown(new ConvertInnerOrCrossJoin())
            ),
            topic("Column pruning and infer predicate",
                    custom(RuleType.COLUMN_PRUNING, ColumnPruning::new),
                    custom(RuleType.INFER_PREDICATES, InferPredicates::new),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    // NOTE: the infer-predicates / push-down-filters pair is applied a
                    // second time — presumably so newly inferred predicates can also be
                    // pushed down; confirm before collapsing the repetition.
                    custom(RuleType.INFER_PREDICATES, InferPredicates::new),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    topDown(new PushFilterInsideJoin()),
                    topDown(new ExpressionNormalization())
            ),
            custom(RuleType.ELIMINATE_UNNECESSARY_PROJECT, EliminateUnnecessaryProject::new),
            topic("Set operation optimization",
                    topDown(new PushProjectThroughUnion(), new MergeProjects()),
                    bottomUp(new MergeSetOperations()),
                    bottomUp(new PushProjectIntoOneRowRelation()),
                    topDown(new MergeOneRowRelationIntoUnion()),
                    topDown(new PushProjectIntoUnion()),
                    costBased(topDown(new InferSetOperatorDistinct())),
                    topDown(new BuildAggForUnion())
            ),
            topic("Eager aggregation",
                    topDown(
                            new PushDownSumThroughJoin(),
                            new PushDownMinMaxThroughJoin(),
                            new PushDownCountThroughJoin()
                    ),
                    topDown(
                            new PushDownSumThroughJoinOneSide(),
                            new PushDownCountThroughJoinOneSide()
                    ),
                    custom(RuleType.PUSH_DOWN_DISTINCT_THROUGH_JOIN, PushDownDistinctThroughJoin::new)
            ),
            topic("Limit optimization",
                    topDown(new LimitSortToTopN()),
                    topDown(new SplitLimit()),
                    topDown(
                            new PushDownLimit(),
                            new PushDownTopNThroughJoin(),
                            new PushDownLimitDistinctThroughJoin(),
                            new PushDownLimitDistinctThroughUnion(),
                            new PushDownTopNThroughWindow(),
                            new PushDownTopNThroughUnion()
                    ),
                    topDown(new CreatePartitionTopNFromWindow()),
                    topDown(
                            new PullUpProjectUnderTopN(),
                            new PullUpProjectUnderLimit()
                    )
            ),
            topic("Table/Physical optimization",
                    topDown(
                            new PruneOlapScanPartition(),
                            new PruneEmptyPartition(),
                            new PruneFileScanPartition(),
                            new PushConjunctsIntoJdbcScan(),
                            new PushConjunctsIntoEsScan()
                    )
            ),
            topic("MV optimization",
                    topDown(
                            new SelectMaterializedIndexWithAggregate(),
                            new SelectMaterializedIndexWithoutAggregate(),
                            new EliminateFilter(),
                            new PushDownFilterThroughProject(),
                            new MergeProjects(),
                            new PruneOlapScanTablet()
                    ),
                    custom(RuleType.COLUMN_PRUNING, ColumnPruning::new),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    custom(RuleType.ELIMINATE_UNNECESSARY_PROJECT, EliminateUnnecessaryProject::new)
            ),
            topic("topn optimize",
                    topDown(new DeferMaterializeTopNResult())
            ),
            topic("eliminate",
                    custom(RuleType.ELIMINATE_SORT, EliminateSort::new),
                    bottomUp(new EliminateEmptyRelation())
            ),
            topic("Final rewrite and check",
                    custom(RuleType.CHECK_DATA_TYPES, CheckDataTypes::new),
                    custom(RuleType.ENSURE_PROJECT_ON_TOP_JOIN, EnsureProjectOnTopJoin::new),
                    topDown(new PushDownFilterThroughProject(), new MergeProjects()),
                    custom(RuleType.ADJUST_CONJUNCTS_RETURN_TYPE, AdjustConjunctsReturnType::new),
                    bottomUp(
                            new ExpressionRewrite(CheckLegalityAfterRewrite.INSTANCE),
                            new CheckMatchExpression(),
                            new CheckMultiDistinct(),
                            new CheckAfterRewrite()
                    )
            ),
            topic("Push project and filter on cte consumer to cte producer",
                    topDown(
                            new CollectFilterAboveConsumer(),
                            new CollectProjectAboveConsumer()
                    )
            )
    );

    // Whole-tree pipeline including cost-based jobs.
    private static final List<RewriteJob> WHOLE_TREE_REWRITE_JOBS = getWholeTreeRewriteJobs(true);

    // Whole-tree pipeline with cost-based jobs filtered out.
    private static final List<RewriteJob> WHOLE_TREE_REWRITE_JOBS_WITHOUT_COST_BASED = getWholeTreeRewriteJobs(false);

    // Job list this executor instance will run.
    private final List<RewriteJob> rewriteJobs;

    private Rewriter(CascadesContext cascadesContext, List<RewriteJob> rewriteJobs) {
        super(cascadesContext);
        this.rewriteJobs = rewriteJobs;
    }

    public static Rewriter getWholeTreeRewriterWithoutCostBasedJobs(CascadesContext cascadesContext) {
        return new Rewriter(cascadesContext, WHOLE_TREE_REWRITE_JOBS_WITHOUT_COST_BASED);
    }

    public static Rewriter getWholeTreeRewriter(CascadesContext cascadesContext) {
        return new Rewriter(cascadesContext, WHOLE_TREE_REWRITE_JOBS);
    }

    // Runs exactly the given jobs (no whole-tree wrapping) on a CTE child.
    public static Rewriter getCteChildrenRewriter(CascadesContext cascadesContext, List<RewriteJob> jobs) {
        return new Rewriter(cascadesContext, jobs);
    }

    // Wraps a caller-supplied job list into the whole-tree pipeline.
    public static Rewriter getWholeTreeRewriterWithCustomJobs(CascadesContext cascadesContext, List<RewriteJob> jobs) {
        return new Rewriter(cascadesContext, getWholeTreeRewriteJobs(jobs));
    }

    private static List<RewriteJob> getWholeTreeRewriteJobs(boolean withCostBased) {
        // Drop CostBasedRewriteJob entries when cost-based rewriting is disabled.
        List<RewriteJob> withoutCostBased = Rewriter.CTE_CHILDREN_REWRITE_JOBS.stream()
                .filter(j -> !(j instanceof CostBasedRewriteJob))
                .collect(Collectors.toList());
        return getWholeTreeRewriteJobs(withCostBased ? CTE_CHILDREN_REWRITE_JOBS : withoutCostBased);
    }

    @Override
    public List<RewriteJob> getJobs() {
        return rewriteJobs;
    }
}
/**
 * Batch job executor holding the Nereids rewrite-phase rule pipelines.
 * <p>
 * {@code CTE_CHILDREN_REWRITE_JOBS} is the ordered job list applied to each CTE
 * child plan; the whole-tree pipelines reuse that list (optionally without its
 * cost-based entries) via {@code getWholeTreeRewriteJobs}.
 */
class Rewriter extends AbstractBatchJobExecutor {

    // Ordered rewrite jobs applied to every CTE child plan (and, minus the
    // cost-based entries, reused for the whole-tree pipelines).
    private static final List<RewriteJob> CTE_CHILDREN_REWRITE_JOBS = jobs(
            topic("Plan Normalization",
                    topDown(
                            new EliminateOrderByConstant(),
                            new EliminateSortUnderSubquery(),
                            new EliminateGroupByConstant(),
                            new LogicalSubQueryAliasToLogicalProject(),
                            new ExpressionNormalization(),
                            new ExpressionOptimization(),
                            new AvgDistinctToSumDivCount(),
                            new CountDistinctRewrite(),
                            new ExtractFilterFromCrossJoin()
                    ),
                    topDown(
                            new ExtractSingleTableExpressionFromDisjunction()
                    )
            ),
            topic("Subquery unnesting",
                    bottomUp(new PullUpProjectUnderApply()),
                    topDown(new PushDownFilterThroughProject()),
                    custom(RuleType.AGG_SCALAR_SUBQUERY_TO_WINDOW_FUNCTION, AggScalarSubQueryToWindowFunction::new),
                    bottomUp(
                            new EliminateUselessPlanUnderApply(),
                            new MergeProjects(),
                            /*
                             * Subquery unnesting.
                             * 1. Adjust the plan in correlated logicalApply
                             *    so that there are no correlated columns in the subquery.
                             * 2. Convert logicalApply to a logicalJoin.
                             * TODO: group these rules to make sure the result plan is what we expected.
                             */
                            new CorrelateApplyToUnCorrelateApply(),
                            new ApplyToJoin()
                    )
            ),
            topic("Eliminate optimization",
                    bottomUp(
                            new EliminateLimit(),
                            new EliminateFilter(),
                            new EliminateAggregate(),
                            new EliminateJoinCondition(),
                            new EliminateAssertNumRows()
                    )
            ),
            topDown(new AdjustAggregateNullableForEmptySet()),
            topDown(
                    new SimplifyAggGroupBy(),
                    new NormalizeAggregate(),
                    new CountLiteralRewrite(),
                    new NormalizeSort()
            ),
            topic("Window analysis",
                    topDown(
                            new ExtractAndNormalizeWindowExpression(),
                            new CheckAndStandardizeWindowFunctionAndFrame()
                    )
            ),
            topic("Rewrite join",
                    topDown(
                            new InferAggNotNull(),
                            new InferFilterNotNull(),
                            new InferJoinNotNull()
                    ),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    topDown(
                            new MergeFilters(),
                            new ReorderJoin(),
                            new PushFilterInsideJoin(),
                            new FindHashConditionForJoin(),
                            new ConvertInnerOrCrossJoin(),
                            new EliminateNullAwareLeftAntiJoin()
                    ),
                    bottomUp(
                            new JoinCommute(),
                            new TransposeSemiJoinLogicalJoin(),
                            new TransposeSemiJoinLogicalJoinProject(),
                            new TransposeSemiJoinAgg(),
                            new TransposeSemiJoinAggProject()
                    ),
                    topDown(
                            new EliminateDedupJoinCondition()
                    ),
                    bottomUp(new EliminateNotNull()),
                    topDown(new ConvertInnerOrCrossJoin())
            ),
            topic("Column pruning and infer predicate",
                    custom(RuleType.COLUMN_PRUNING, ColumnPruning::new),
                    custom(RuleType.INFER_PREDICATES, InferPredicates::new),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    // NOTE: the infer-predicates / push-down-filters pair is applied a
                    // second time — presumably so newly inferred predicates can also be
                    // pushed down; confirm before collapsing the repetition.
                    custom(RuleType.INFER_PREDICATES, InferPredicates::new),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    topDown(new PushFilterInsideJoin()),
                    topDown(new ExpressionNormalization())
            ),
            custom(RuleType.ELIMINATE_UNNECESSARY_PROJECT, EliminateUnnecessaryProject::new),
            topic("Set operation optimization",
                    topDown(new PushProjectThroughUnion(), new MergeProjects()),
                    bottomUp(new MergeSetOperations()),
                    bottomUp(new PushProjectIntoOneRowRelation()),
                    topDown(new MergeOneRowRelationIntoUnion()),
                    topDown(new PushProjectIntoUnion()),
                    costBased(topDown(new InferSetOperatorDistinct())),
                    topDown(new BuildAggForUnion())
            ),
            topic("Eager aggregation",
                    topDown(
                            new PushDownSumThroughJoin(),
                            new PushDownMinMaxThroughJoin(),
                            new PushDownCountThroughJoin()
                    ),
                    topDown(
                            new PushDownSumThroughJoinOneSide(),
                            new PushDownCountThroughJoinOneSide()
                    ),
                    custom(RuleType.PUSH_DOWN_DISTINCT_THROUGH_JOIN, PushDownDistinctThroughJoin::new)
            ),
            topic("Limit optimization",
                    topDown(new LimitSortToTopN()),
                    topDown(new SplitLimit()),
                    topDown(
                            new PushDownLimit(),
                            new PushDownTopNThroughJoin(),
                            new PushDownLimitDistinctThroughJoin(),
                            new PushDownLimitDistinctThroughUnion(),
                            new PushDownTopNThroughWindow(),
                            new PushDownTopNThroughUnion()
                    ),
                    topDown(new CreatePartitionTopNFromWindow()),
                    topDown(
                            new PullUpProjectUnderTopN(),
                            new PullUpProjectUnderLimit()
                    )
            ),
            topic("Table/Physical optimization",
                    topDown(
                            new PruneOlapScanPartition(),
                            new PruneEmptyPartition(),
                            new PruneFileScanPartition(),
                            new PushConjunctsIntoJdbcScan(),
                            new PushConjunctsIntoEsScan()
                    )
            ),
            topic("MV optimization",
                    topDown(
                            new SelectMaterializedIndexWithAggregate(),
                            new SelectMaterializedIndexWithoutAggregate(),
                            new EliminateFilter(),
                            new PushDownFilterThroughProject(),
                            new MergeProjects(),
                            new PruneOlapScanTablet()
                    ),
                    custom(RuleType.COLUMN_PRUNING, ColumnPruning::new),
                    bottomUp(RuleSet.PUSH_DOWN_FILTERS),
                    custom(RuleType.ELIMINATE_UNNECESSARY_PROJECT, EliminateUnnecessaryProject::new)
            ),
            topic("topn optimize",
                    topDown(new DeferMaterializeTopNResult())
            ),
            topic("eliminate",
                    custom(RuleType.ELIMINATE_SORT, EliminateSort::new),
                    bottomUp(new EliminateEmptyRelation())
            ),
            topic("Final rewrite and check",
                    custom(RuleType.CHECK_DATA_TYPES, CheckDataTypes::new),
                    custom(RuleType.ENSURE_PROJECT_ON_TOP_JOIN, EnsureProjectOnTopJoin::new),
                    topDown(new PushDownFilterThroughProject(), new MergeProjects()),
                    custom(RuleType.ADJUST_CONJUNCTS_RETURN_TYPE, AdjustConjunctsReturnType::new),
                    bottomUp(
                            new ExpressionRewrite(CheckLegalityAfterRewrite.INSTANCE),
                            new CheckMatchExpression(),
                            new CheckMultiDistinct(),
                            new CheckAfterRewrite()
                    )
            ),
            topic("Push project and filter on cte consumer to cte producer",
                    topDown(
                            new CollectFilterAboveConsumer(),
                            new CollectProjectAboveConsumer()
                    )
            )
    );

    // Whole-tree pipeline including cost-based jobs.
    private static final List<RewriteJob> WHOLE_TREE_REWRITE_JOBS = getWholeTreeRewriteJobs(true);

    // Whole-tree pipeline with cost-based jobs filtered out.
    private static final List<RewriteJob> WHOLE_TREE_REWRITE_JOBS_WITHOUT_COST_BASED = getWholeTreeRewriteJobs(false);

    // Job list this executor instance will run.
    private final List<RewriteJob> rewriteJobs;

    private Rewriter(CascadesContext cascadesContext, List<RewriteJob> rewriteJobs) {
        super(cascadesContext);
        this.rewriteJobs = rewriteJobs;
    }

    public static Rewriter getWholeTreeRewriterWithoutCostBasedJobs(CascadesContext cascadesContext) {
        return new Rewriter(cascadesContext, WHOLE_TREE_REWRITE_JOBS_WITHOUT_COST_BASED);
    }

    public static Rewriter getWholeTreeRewriter(CascadesContext cascadesContext) {
        return new Rewriter(cascadesContext, WHOLE_TREE_REWRITE_JOBS);
    }

    // Runs exactly the given jobs (no whole-tree wrapping) on a CTE child.
    public static Rewriter getCteChildrenRewriter(CascadesContext cascadesContext, List<RewriteJob> jobs) {
        return new Rewriter(cascadesContext, jobs);
    }

    // Wraps a caller-supplied job list into the whole-tree pipeline.
    public static Rewriter getWholeTreeRewriterWithCustomJobs(CascadesContext cascadesContext, List<RewriteJob> jobs) {
        return new Rewriter(cascadesContext, getWholeTreeRewriteJobs(jobs));
    }

    private static List<RewriteJob> getWholeTreeRewriteJobs(boolean withCostBased) {
        // Drop CostBasedRewriteJob entries when cost-based rewriting is disabled.
        List<RewriteJob> withoutCostBased = Rewriter.CTE_CHILDREN_REWRITE_JOBS.stream()
                .filter(j -> !(j instanceof CostBasedRewriteJob))
                .collect(Collectors.toList());
        return getWholeTreeRewriteJobs(withCostBased ? CTE_CHILDREN_REWRITE_JOBS : withoutCostBased);
    }

    @Override
    public List<RewriteJob> getJobs() {
        return rewriteJobs;
    }
}
```suggestion LocalVariableProxyImpl selfVariable = stackFrame.visibleVariableByName(SELF_VAR_NAME); ```
/**
 * Derives a user-facing name for the given stack frame. For frames inside a
 * Ballerina service object, the JVM method name is a '$'-separated mangled name;
 * this extracts the resource/accessor part of that name.
 *
 * @param stackFrame JDI stack frame proxy
 * @return filtered stack frame name
 * @throws JdiProxyException if frame information cannot be read from the VM
 */
private static String getFilteredStackFrame(StackFrameProxyImpl stackFrame) throws JdiProxyException {
    String stackFrameName = stackFrame.location().method().name();
    LocalVariableProxyImpl selfVariable = stackFrame.visibleVariableByName(SELF_VAR_NAME);
    if (selfVariable == null) {
        // Not an object method; keep the raw JVM method name.
        return stackFrameName;
    }
    Value selfValue = stackFrame.getValue(selfVariable);
    if (isService(selfValue)) {
        if (stackFrameName.equals(METHOD_INIT)) {
            return BVariableType.SERVICE.getString();
        }
        // Fix: guard every index access. The previous code read parts[1] and
        // parts[2] unconditionally and threw ArrayIndexOutOfBoundsException for
        // method names without enough '$' separators.
        String[] stackFrameNameParts = stackFrameName.split("\\$");
        if (stackFrameNameParts.length > 1 && stackFrameNameParts[1].equals(ACCESSOR_DEFAULT)) {
            return ACCESSOR_DEFAULT;
        }
        return stackFrameNameParts.length > 2 ? stackFrameNameParts[2] : stackFrameName;
    }
    return stackFrameName;
}
LocalVariableProxyImpl selfVisibleVariable = stackFrame.visibleVariableByName(SELF_VAR_NAME);
/**
 * Derives a user-facing name for the given stack frame. For frames inside a
 * Ballerina service object, the JVM method name is a '$'-separated mangled name
 * from which the resource/accessor part is extracted.
 *
 * @param stackFrame JDI stack frame proxy
 * @return filtered stack frame name
 * @throws JdiProxyException if frame information cannot be read from the VM
 */
private static String getFilteredStackFrame(StackFrameProxyImpl stackFrame) throws JdiProxyException {
    String methodName = stackFrame.location().method().name();
    LocalVariableProxyImpl selfVariable = stackFrame.visibleVariableByName(SELF_VAR_NAME);
    // Not an object method, or the receiver is not a service: keep the raw name.
    if (selfVariable == null || !isService(stackFrame.getValue(selfVariable))) {
        return methodName;
    }
    if (METHOD_INIT.equals(methodName)) {
        return BVariableType.SERVICE.getString();
    }
    String[] parts = methodName.split("\\$");
    if (parts.length > 1 && ACCESSOR_DEFAULT.equals(parts[1])) {
        return parts[1];
    }
    return parts.length > 2 ? parts[2] : methodName;
}
/**
 * Wraps a single JDI stack frame of a Ballerina program and adapts it to the
 * Debug Adapter Protocol form ({@link org.eclipse.lsp4j.debug.StackFrame}).
 */
class BallerinaStackFrame {

    // Debugger execution context, used to resolve source paths for this frame.
    private final ExecutionContext context;
    // DAP frame id assigned to this frame.
    private final Integer frameId;
    // Underlying JDI stack frame proxy being wrapped.
    private final StackFrameProxyImpl jStackFrame;
    // Lazily-computed DAP representation; cached after first computation.
    private StackFrame dapStackFrame;

    // Field on the strand object holding the user-visible strand name.
    private static final String STRAND_FIELD_NAME = "name";
    // Frame-name prefixes/markers used when rendering frame names.
    private static final String FRAME_TYPE_START = "start";
    private static final String FRAME_TYPE_WORKER = "worker";
    private static final String FRAME_TYPE_ANONYMOUS = "anonymous";
    private static final String FRAME_SEPARATOR = ":";
    private static final String ACCESSOR_DEFAULT = "default";
    private static final String METHOD_INIT = "$init$";
    private static final String SELF_VAR_NAME = "self";
    // Matches generated worker lambda method names.
    private static final String WORKER_LAMBDA_REGEX = "(\\$lambda\\$)\\b(.*)\\b(\\$lambda)(.*)";

    public BallerinaStackFrame(ExecutionContext context, Integer frameId, StackFrameProxyImpl stackFrameProxy) {
        this.context = context;
        this.frameId = frameId;
        this.jStackFrame = stackFrameProxy;
    }

    public StackFrameProxyImpl getJStackFrame() {
        return jStackFrame;
    }

    /**
     * Returns a debugger adapter protocol compatible instance of this breakpoint.
     *
     * @return as an instance of {@link org.eclipse.lsp4j.debug.StackFrame}
     */
    public Optional<StackFrame> getAsDAPStackFrame() {
        // Compute once and cache; subsequent calls reuse the cached frame.
        dapStackFrame = Objects.requireNonNullElse(dapStackFrame, computeDapStackFrame());
        return Optional.of(dapStackFrame);
    }

    // Builds the DAP stack frame; returns null for non-Ballerina frames or on any failure.
    private StackFrame computeDapStackFrame() {
        try {
            if (!isBalStackFrame(jStackFrame.getStackFrame())) {
                return null;
            }
            StackFrame dapStackFrame = new StackFrame();
            dapStackFrame.setId(frameId);
            dapStackFrame.setName(getStackFrameName(jStackFrame));
            dapStackFrame.setLine(jStackFrame.location().lineNumber());
            dapStackFrame.setColumn(0);
            Optional<Path> sourcePath = getSrcPathFromBreakpointLocation(jStackFrame.location(),
                    context.getSourceProject());
            if (sourcePath.isPresent()) {
                Source source = new Source();
                source.setPath(sourcePath.get().toString());
                source.setName(jStackFrame.location().sourceName());
                dapStackFrame.setSource(source);
            }
            return dapStackFrame;
        } catch (Exception e) {
            // Any JDI failure renders this frame unusable; drop it silently.
            return null;
        }
    }

    /**
     * Derives ballerina stack frame name from the given java stack frame instance.
     *
     * @param stackFrame JDI stack frame instance
     * @return Ballerina stack frame name
     */
    private static String getStackFrameName(StackFrameProxyImpl stackFrame) {
        try {
            String frameName;
            ObjectReference strand = getStrand(stackFrame);
            if (strand != null) {
                // Prefer the strand's own name; fall back to "anonymous" when absent.
                Value frameNameValue = strand.getValue(strand.referenceType().fieldByName(STRAND_FIELD_NAME));
                if (frameNameValue != null) {
                    frameName = removeRedundantQuotes(String.valueOf(frameNameValue));
                } else {
                    frameName = FRAME_TYPE_ANONYMOUS;
                }
            } else {
                frameName = FRAME_TYPE_ANONYMOUS;
            }
            if (stackFrame.location().method().name().matches(WORKER_LAMBDA_REGEX)) {
                return FRAME_TYPE_WORKER + FRAME_SEPARATOR + frameName;
            } else if (stackFrame.location().method().name().contains(LAMBDA)) {
                // A lambda without a visible strand variable is a "start" frame.
                return stackFrame.visibleVariableByName(STRAND_VAR_NAME) != null
                        ? frameName : FRAME_TYPE_START + FRAME_SEPARATOR + frameName;
            } else {
                return getFilteredStackFrame(stackFrame);
            }
        } catch (Exception e) {
            return FRAME_TYPE_ANONYMOUS;
        }
    }

    /**
     * Derives filtered ballerina stack frame name from the given java stack frame instance.
     * NOTE(review): {@code getFilteredStackFrame} appears to be defined elsewhere in this
     * file; this javadoc documents that method — confirm placement.
     *
     * @param stackFrame JDI stack frame instance
     * @return filtered ballerina stack frame name
     */
    /**
     * Retrieves ballerina strand instance of the given stack frame.
     */
    private static ObjectReference getStrand(StackFrameProxyImpl frame) {
        try {
            if (frame.visibleVariableByName(STRAND_VAR_NAME) == null) {
                // No visible strand variable: the strand is passed as the first method argument.
                return (ObjectReference) ((ArrayReference) frame.getStackFrame().getArgumentValues().get(0))
                        .getValue(0);
            }
            return (ObjectReference) frame.getValue(frame.visibleVariableByName(STRAND_VAR_NAME));
        } catch (Exception e) {
            return null;
        }
    }
}
/**
 * Wraps a single JDI stack frame of a Ballerina program and adapts it to the
 * Debug Adapter Protocol form ({@link org.eclipse.lsp4j.debug.StackFrame}).
 */
class BallerinaStackFrame {

    // Debugger execution context, used to resolve source paths for this frame.
    private final ExecutionContext context;
    // DAP frame id assigned to this frame.
    private final Integer frameId;
    // Underlying JDI stack frame proxy being wrapped.
    private final StackFrameProxyImpl jStackFrame;
    // Lazily-computed DAP representation; cached after first computation.
    private StackFrame dapStackFrame;

    // Field on the strand object holding the user-visible strand name.
    private static final String STRAND_FIELD_NAME = "name";
    // Frame-name prefixes/markers used when rendering frame names.
    private static final String FRAME_TYPE_START = "start";
    private static final String FRAME_TYPE_WORKER = "worker";
    private static final String FRAME_TYPE_ANONYMOUS = "anonymous";
    private static final String FRAME_SEPARATOR = ":";
    private static final String ACCESSOR_DEFAULT = "default";
    private static final String METHOD_INIT = "$init$";
    private static final String SELF_VAR_NAME = "self";
    // Matches generated worker lambda method names.
    private static final String WORKER_LAMBDA_REGEX = "(\\$lambda\\$)\\b(.*)\\b(\\$lambda)(.*)";

    public BallerinaStackFrame(ExecutionContext context, Integer frameId, StackFrameProxyImpl stackFrameProxy) {
        this.context = context;
        this.frameId = frameId;
        this.jStackFrame = stackFrameProxy;
    }

    public StackFrameProxyImpl getJStackFrame() {
        return jStackFrame;
    }

    /**
     * Returns a debugger adapter protocol compatible instance of this breakpoint.
     *
     * @return as an instance of {@link org.eclipse.lsp4j.debug.StackFrame}
     */
    public Optional<StackFrame> getAsDAPStackFrame() {
        // Compute once and cache; subsequent calls reuse the cached frame.
        dapStackFrame = Objects.requireNonNullElse(dapStackFrame, computeDapStackFrame());
        return Optional.of(dapStackFrame);
    }

    // Builds the DAP stack frame; returns null for non-Ballerina frames or on any failure.
    private StackFrame computeDapStackFrame() {
        try {
            if (!isBalStackFrame(jStackFrame.getStackFrame())) {
                return null;
            }
            StackFrame dapStackFrame = new StackFrame();
            dapStackFrame.setId(frameId);
            dapStackFrame.setName(getStackFrameName(jStackFrame));
            dapStackFrame.setLine(jStackFrame.location().lineNumber());
            dapStackFrame.setColumn(0);
            Optional<Path> sourcePath = getSrcPathFromBreakpointLocation(jStackFrame.location(),
                    context.getSourceProject());
            if (sourcePath.isPresent()) {
                Source source = new Source();
                source.setPath(sourcePath.get().toString());
                source.setName(jStackFrame.location().sourceName());
                dapStackFrame.setSource(source);
            }
            return dapStackFrame;
        } catch (Exception e) {
            // Any JDI failure renders this frame unusable; drop it silently.
            return null;
        }
    }

    /**
     * Derives ballerina stack frame name from the given java stack frame instance.
     *
     * @param stackFrame JDI stack frame instance
     * @return Ballerina stack frame name
     */
    private static String getStackFrameName(StackFrameProxyImpl stackFrame) {
        try {
            String frameName;
            ObjectReference strand = getStrand(stackFrame);
            if (strand != null) {
                // Prefer the strand's own name; fall back to "anonymous" when absent.
                Value frameNameValue = strand.getValue(strand.referenceType().fieldByName(STRAND_FIELD_NAME));
                if (frameNameValue != null) {
                    frameName = removeRedundantQuotes(String.valueOf(frameNameValue));
                } else {
                    frameName = FRAME_TYPE_ANONYMOUS;
                }
            } else {
                frameName = FRAME_TYPE_ANONYMOUS;
            }
            if (stackFrame.location().method().name().matches(WORKER_LAMBDA_REGEX)) {
                return FRAME_TYPE_WORKER + FRAME_SEPARATOR + frameName;
            } else if (stackFrame.location().method().name().contains(LAMBDA)) {
                // A lambda without a visible strand variable is a "start" frame.
                return stackFrame.visibleVariableByName(STRAND_VAR_NAME) != null
                        ? frameName : FRAME_TYPE_START + FRAME_SEPARATOR + frameName;
            } else {
                return getFilteredStackFrame(stackFrame);
            }
        } catch (Exception e) {
            return FRAME_TYPE_ANONYMOUS;
        }
    }

    /**
     * Derives filtered ballerina stack frame name from the given java stack frame instance.
     * NOTE(review): {@code getFilteredStackFrame} appears to be defined elsewhere in this
     * file; this javadoc documents that method — confirm placement.
     *
     * @param stackFrame JDI stack frame instance
     * @return filtered ballerina stack frame name
     */
    /**
     * Retrieves ballerina strand instance of the given stack frame.
     */
    private static ObjectReference getStrand(StackFrameProxyImpl frame) {
        try {
            if (frame.visibleVariableByName(STRAND_VAR_NAME) == null) {
                // No visible strand variable: the strand is passed as the first method argument.
                return (ObjectReference) ((ArrayReference) frame.getStackFrame().getArgumentValues().get(0))
                        .getValue(0);
            }
            return (ObjectReference) frame.getValue(frame.visibleVariableByName(STRAND_VAR_NAME));
        } catch (Exception e) {
            return null;
        }
    }
}
Why are we removing the hyphen? Same applies to L3599.
/**
 * Type-checks an on-fail clause: opens a block scope for its body, defines the
 * captured error variable (if declared), and validates that the variable's type
 * is assignable to {@code error}.
 */
public void visit(BLangOnFailClause onFailClause, AnalyzerData data) {
    SymbolEnv onFailBlockEnv = SymbolEnv.createBlockEnv(onFailClause.body, data.env);
    VariableDefinitionNode varDefNode = onFailClause.variableDefinitionNode;
    if (varDefNode != null) {
        // Define the captured variable against errorType in the new scope.
        handleForeachDefinitionVariables(varDefNode, symTable.errorType, onFailClause.isDeclaredWithVar,
                true, onFailBlockEnv);
        BLangVariable errorVar = (BLangVariable) varDefNode.getVariable();
        if (!types.isAssignable(errorVar.getBType(), symTable.errorType)) {
            dlog.error(errorVar.pos, DiagnosticErrorCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR,
                    errorVar.getBType());
        }
    }
    data.env = onFailBlockEnv;
    analyzeStmt(onFailClause.body, data);
}
/**
 * Type-checks an on-fail clause: opens a block scope for its body, defines the
 * captured error variable (if declared), and validates that the variable's type
 * is assignable to {@code error}.
 */
public void visit(BLangOnFailClause onFailClause, AnalyzerData data) {
    // The on-fail body gets its own block environment derived from the current one.
    SymbolEnv onFailEnv = SymbolEnv.createBlockEnv(onFailClause.body, data.env);
    VariableDefinitionNode onFailVarDefNode = onFailClause.variableDefinitionNode;
    if (onFailVarDefNode != null) {
        // On-fail variables are always typed against errorType.
        handleForeachDefinitionVariables(onFailVarDefNode, symTable.errorType,
                onFailClause.isDeclaredWithVar, true, onFailEnv);
        BLangVariable onFailVarNode = (BLangVariable) onFailVarDefNode.getVariable();
        // Report a diagnostic if the declared type is not an error subtype.
        if (!types.isAssignable(onFailVarNode.getBType(), symTable.errorType)) {
            dlog.error(onFailVarNode.pos, DiagnosticErrorCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR,
                    onFailVarNode.getBType());
        }
    }
    data.env = onFailEnv;
    analyzeStmt(onFailClause.body, data);
}
class representing a service-decl or object-ctor with service prefix AttachPoint.Point attachedPoint; Set<Flag> flagSet = classDefinition.flagSet; if (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) { attachedPoint = AttachPoint.Point.SERVICE; }
class representing a service-decl or object-ctor with service prefix AttachPoint.Point attachedPoint; Set<Flag> flagSet = classDefinition.flagSet; if (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) { attachedPoint = AttachPoint.Point.SERVICE; }
This should use uppercase; I will modify it.
/**
 * Fills {@code tblPrivResult} with one row per (table, privilege) pair visible
 * to the current user, mimicking MySQL's information_schema.TABLE_PRIVILEGES.
 *
 * Fixes: IS_GRANTABLE must be uppercase "YES"/"NO" to match MySQL's convention
 * (it was lowercase), and the {@code new String("\'")} antipattern is replaced
 * with plain concatenation.
 *
 * @param tblPrivResult output list to append privilege rows to
 * @param currentUser   user whose visibility governs which rows are exposed
 */
public void getTablePrivStatus(List<TPrivilegeStatus> tblPrivResult, UserIdentity currentUser) {
    readLock();
    try {
        for (PrivEntry entry : tablePrivTable.getEntries()) {
            TablePrivEntry tblPrivEntry = (TablePrivEntry) entry;
            String dbName = ClusterNamespace.getNameFromFullName(tblPrivEntry.getOrigDb());
            String tblName = tblPrivEntry.getOrigTbl();
            // Never expose privileges on information_schema; the viewer also needs SHOW on the table.
            if (dbName.equals("information_schema")
                    || !checkTblPriv(currentUser, tblPrivEntry.getOrigDb(), tblName, PrivPredicate.SHOW)) {
                continue;
            }
            // Grantee is rendered in MySQL's 'user'@'host' form.
            String grantee = "'" + ClusterNamespace.getNameFromFullName(tblPrivEntry.getOrigUser())
                    + "'@'" + tblPrivEntry.getOrigHost() + "'";
            // NOTE(review): bit index 2 presumably corresponds to GRANT_PRIV — confirm against
            // the PrivBitSet bit layout.
            String isGrantable = tblPrivEntry.getPrivSet().get(2) ? "YES" : "NO";
            for (PaloPrivilege paloPriv : tblPrivEntry.getPrivSet().toPrivilegeList()) {
                // GRANT_PRIV is surfaced through IS_GRANTABLE rather than as its own row.
                if (paloPriv == PaloPrivilege.GRANT_PRIV) {
                    continue;
                }
                TPrivilegeStatus status = new TPrivilegeStatus();
                status.setTableName(tblName);
                status.setPrivilegeType(privilegesInMysql[paloPriv.getIdx()]);
                status.setGrantee(grantee);
                status.setSchema(dbName);
                status.setIsGrantable(isGrantable);
                tblPrivResult.add(status);
            }
        }
    } finally {
        readUnlock();
    }
}
String isGrantable = tblPrivEntry.getPrivSet().get(2) ? "yes" : "no";
/**
 * Appends one row per (table, privilege) pair visible to the current user,
 * following MySQL's information_schema.TABLE_PRIVILEGES shape.
 *
 * @param tblPrivResult output list to append privilege rows to
 * @param currentUser   user whose visibility governs which rows are exposed
 */
public void getTablePrivStatus(List<TPrivilegeStatus> tblPrivResult, UserIdentity currentUser) {
    readLock();
    try {
        for (PrivEntry entry : tablePrivTable.getEntries()) {
            TablePrivEntry tablePriv = (TablePrivEntry) entry;
            String schema = ClusterNamespace.getNameFromFullName(tablePriv.getOrigDb());
            String table = tablePriv.getOrigTbl();
            // Rows on information_schema are hidden, as are tables the viewer cannot SHOW.
            boolean hidden = schema.equals("information_schema")
                    || !checkTblPriv(currentUser, tablePriv.getOrigDb(), table, PrivPredicate.SHOW);
            if (hidden) {
                continue;
            }
            String user = ClusterNamespace.getNameFromFullName(tablePriv.getOrigUser());
            // MySQL-style 'user'@'host' grantee rendering.
            String grantee = "'" + user + "'@'" + tablePriv.getOrigHost() + "'";
            String grantable = tablePriv.getPrivSet().get(2) ? "YES" : "NO";
            for (PaloPrivilege priv : tablePriv.getPrivSet().toPrivilegeList()) {
                // Only privileges with a MySQL-equivalent name are reported.
                if (!PaloPrivilege.privInPaloToMysql.containsKey(priv)) {
                    continue;
                }
                TPrivilegeStatus row = new TPrivilegeStatus();
                row.setTableName(table);
                row.setPrivilegeType(PaloPrivilege.privInPaloToMysql.get(priv));
                row.setGrantee(grantee);
                row.setSchema(schema);
                row.setIsGrantable(grantable);
                tblPrivResult.add(row);
            }
        }
    } finally {
        readUnlock();
    }
}
class PaloAuth implements Writable { private static final Logger LOG = LogManager.getLogger(PaloAuth.class); public static final String ROOT_USER = "root"; public static final String ADMIN_USER = "admin"; private UserPrivTable userPrivTable = new UserPrivTable(); private DbPrivTable dbPrivTable = new DbPrivTable(); private TablePrivTable tablePrivTable = new TablePrivTable(); private ResourcePrivTable resourcePrivTable = new ResourcePrivTable(); private RoleManager roleManager = new RoleManager();; private UserPropertyMgr propertyMgr = new UserPropertyMgr(); private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private void readLock() { lock.readLock().lock(); } private void readUnlock() { lock.readLock().unlock(); } private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public enum PrivLevel { GLOBAL, DATABASE, TABLE, RESOURCE } public PaloAuth() { initUser(); } public UserPrivTable getUserPrivTable() { return userPrivTable; } public DbPrivTable getDbPrivTable() { return dbPrivTable; } public TablePrivTable getTablePrivTable() { return tablePrivTable; } private GlobalPrivEntry grantGlobalPrivs(UserIdentity userIdentity, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { if (errOnExist && errOnNonExist) { throw new DdlException("Can only specified errOnExist or errOnNonExist"); } GlobalPrivEntry entry; try { entry = GlobalPrivEntry.create(userIdentity.getHost(), userIdentity.getQualifiedUser(), userIdentity.isDomain(), new byte[0] /* no use */, privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } userPrivTable.addEntry(entry, errOnExist, errOnNonExist); return entry; } private void revokeGlobalPrivs(UserIdentity userIdentity, PrivBitSet privs, boolean errOnNonExist) throws DdlException { GlobalPrivEntry entry; try { entry = GlobalPrivEntry.create(userIdentity.getHost(), 
userIdentity.getQualifiedUser(), userIdentity.isDomain(), new byte[0] /* no use */, privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } userPrivTable.revoke(entry, errOnNonExist, false /* not delete entry if priv is empty, because global priv entry has password */); } private void grantDbPrivs(UserIdentity userIdentity, String db, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { DbPrivEntry entry; try { entry = DbPrivEntry.create(userIdentity.getHost(), db, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } dbPrivTable.addEntry(entry, errOnExist, errOnNonExist); } private void revokeDbPrivs(UserIdentity userIdentity, String db, PrivBitSet privs, boolean errOnNonExist) throws DdlException { DbPrivEntry entry; try { entry = DbPrivEntry.create(userIdentity.getHost(), db, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } dbPrivTable.revoke(entry, errOnNonExist, true /* delete entry when empty */); } private void grantTblPrivs(UserIdentity userIdentity, String db, String tbl, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { TablePrivEntry entry; try { entry = TablePrivEntry.create(userIdentity.getHost(), db, userIdentity.getQualifiedUser(), tbl, userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } tablePrivTable.addEntry(entry, errOnExist, errOnNonExist); } private void revokeTblPrivs(UserIdentity userIdentity, String db, String tbl, PrivBitSet privs, boolean errOnNonExist) throws DdlException { TablePrivEntry entry; try { entry = TablePrivEntry.create(userIdentity.getHost(), db, 
userIdentity.getQualifiedUser(), tbl, userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } tablePrivTable.revoke(entry, errOnNonExist, true /* delete entry when empty */); } private void grantResourcePrivs(UserIdentity userIdentity, String resourceName, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { ResourcePrivEntry entry; try { entry = ResourcePrivEntry.create(userIdentity.getHost(), resourceName, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } resourcePrivTable.addEntry(entry, errOnExist, errOnNonExist); } private void revokeResourcePrivs(UserIdentity userIdentity, String resourceName, PrivBitSet privs, boolean errOnNonExist) throws DdlException { ResourcePrivEntry entry; try { entry = ResourcePrivEntry.create(userIdentity.getHost(), resourceName, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } resourcePrivTable.revoke(entry, errOnNonExist, true /* delete entry when empty */); } /* * check password, if matched, save the userIdentity in matched entry. * the following auth checking should use userIdentity saved in currentUser. 
*/ public boolean checkPassword(String remoteUser, String remoteHost, byte[] remotePasswd, byte[] randomString, List<UserIdentity> currentUser) { if (!Config.enable_auth_check) { return true; } if ((remoteUser.equals(ROOT_USER) || remoteUser.equals(ADMIN_USER)) && remoteHost.equals("127.0.0.1")) { if (remoteUser.equals(ROOT_USER)) { currentUser.add(UserIdentity.ROOT); } else { currentUser.add(UserIdentity.ADMIN); } return true; } readLock(); try { return userPrivTable.checkPassword(remoteUser, remoteHost, remotePasswd, randomString, currentUser); } finally { readUnlock(); } } public boolean checkPlainPassword(String remoteUser, String remoteHost, String remotePasswd, List<UserIdentity> currentUser) { if (!Config.enable_auth_check) { return true; } readLock(); try { return userPrivTable.checkPlainPassword(remoteUser, remoteHost, remotePasswd, currentUser); } finally { readUnlock(); } } public boolean checkGlobalPriv(ConnectContext ctx, PrivPredicate wanted) { return checkGlobalPriv(ctx.getCurrentUserIdentity(), wanted); } public boolean checkGlobalPriv(UserIdentity currentUser, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; } public boolean checkDbPriv(ConnectContext ctx, String qualifiedDb, PrivPredicate wanted) { return checkDbPriv(ctx.getCurrentUserIdentity(), qualifiedDb, wanted); } /* * Check if 'user'@'host' on 'db' has 'wanted' priv. * If the given db is null, which means it will no check if database name is matched. */ public boolean checkDbPriv(UserIdentity currentUser, String db, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } if (wanted.getPrivs().containsNodePriv()) { LOG.debug("should not check NODE priv in Database level. 
user: {}, db: {}", currentUser, db); return false; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs) || checkDbInternal(currentUser, db, wanted, savedPrivs)) { return true; } if (db != null && wanted == PrivPredicate.SHOW && checkTblWithDb(currentUser, db)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; } /* * User may not have privs on a database, but have privs of tables in this database. * So we have to check if user has any privs of tables in this database. * if so, the database should be visible to this user. */ private boolean checkTblWithDb(UserIdentity currentUser, String db) { readLock(); try { return tablePrivTable.hasPrivsOfDb(currentUser, db); } finally { readUnlock(); } } public boolean checkTblPriv(ConnectContext ctx, String qualifiedDb, String tbl, PrivPredicate wanted) { return checkTblPriv(ctx.getCurrentUserIdentity(), qualifiedDb, tbl, wanted); } public boolean checkTblPriv(UserIdentity currentUser, String db, String tbl, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } if (wanted.getPrivs().containsNodePriv()) { LOG.debug("should check NODE priv in GLOBAL level. 
user: {}, db: {}, tbl: {}", currentUser, db, tbl); return false; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs) || checkDbInternal(currentUser, db, wanted, savedPrivs) || checkTblInternal(currentUser, db, tbl, wanted, savedPrivs)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; } public boolean checkResourcePriv(ConnectContext ctx, String resourceName, PrivPredicate wanted) { return checkResourcePriv(ctx.getCurrentUserIdentity(), resourceName, wanted); } public boolean checkResourcePriv(UserIdentity currentUser, String resourceName, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs) || checkResourceInternal(currentUser, resourceName, wanted, savedPrivs)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; } public boolean checkPrivByAuthInfo(ConnectContext ctx, AuthorizationInfo authInfo, PrivPredicate wanted) { if (authInfo == null) { return false; } if (authInfo.getDbName() == null) { return false; } if (authInfo.getTableNameList() == null || authInfo.getTableNameList().isEmpty()) { return checkDbPriv(ctx, authInfo.getDbName(), wanted); } for (String tblName : authInfo.getTableNameList()) { if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), authInfo.getDbName(), tblName, wanted)) { return false; } } return true; } /* * Check if current user has certain privilege. * This method will check the given privilege levels */ public boolean checkHasPriv(ConnectContext ctx, PrivPredicate priv, PrivLevel... levels) { return checkHasPrivInternal(ctx.getRemoteIP(), ctx.getQualifiedUser(), priv, levels); } private boolean checkHasPrivInternal(String host, String user, PrivPredicate priv, PrivLevel... 
levels) { for (PrivLevel privLevel : levels) { switch (privLevel) { case GLOBAL: if (userPrivTable.hasPriv(host, user, priv)) { return true; } break; case DATABASE: if (dbPrivTable.hasPriv(host, user, priv)) { return true; } break; case TABLE: if (tablePrivTable.hasPriv(host, user, priv)) { return true; } break; default: break; } } return false; } private boolean checkGlobalInternal(UserIdentity currentUser, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { userPrivTable.getPrivs(currentUser, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } return false; } finally { readUnlock(); } } private boolean checkDbInternal(UserIdentity currentUser, String db, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { dbPrivTable.getPrivs(currentUser, db, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } } finally { readUnlock(); } return false; } private boolean checkTblInternal(UserIdentity currentUser, String db, String tbl, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { tablePrivTable.getPrivs(currentUser, db, tbl, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } return false; } finally { readUnlock(); } } private boolean checkResourceInternal(UserIdentity currentUser, String resourceName, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { resourcePrivTable.getPrivs(currentUser, resourceName, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } return false; } finally { readUnlock(); } } public void clear() { userPrivTable.clear(); dbPrivTable.clear(); tablePrivTable.clear(); resourcePrivTable.clear(); } public void createUser(CreateUserStmt stmt) throws DdlException { createUserInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getPassword(), false); } public void replayCreateUser(PrivInfo privInfo) { try { createUserInternal(privInfo.getUserIdent(), privInfo.getRole(), privInfo.getPasswd(), true); } 
catch (DdlException e) { LOG.error("should not happen", e); } } /* * Do following steps: * 1. Check does specified role exist. If not, throw exception. * 2. Check does user already exist. If yes, throw exception. * 3. set password for specified user. * 4. grant privs of role to user, if role is specified. */ private void createUserInternal(UserIdentity userIdent, String roleName, byte[] password, boolean isReplay) throws DdlException { writeLock(); try { PaloRole role = null; if (roleName != null) { role = roleManager.getRole(roleName); if (role == null) { throw new DdlException("Role: " + roleName + " does not exist"); } } if (userPrivTable.doesUserExist(userIdent)) { throw new DdlException("User " + userIdent + " already exist"); } setPasswordInternal(userIdent, password, null, false /* err on non exist */, false /* set by resolver */, true /* is replay */); if (role != null) { for (Map.Entry<TablePattern, PrivBitSet> entry : role.getTblPatternToPrivs().entrySet()) { grantInternal(userIdent, null /* role */, entry.getKey(), entry.getValue().copy(), false /* err on non exist */, true /* is replay */); } for (Map.Entry<ResourcePattern, PrivBitSet> entry : role.getResourcePatternToPrivs().entrySet()) { grantInternal(userIdent, null /* role */, entry.getKey(), entry.getValue().copy(), false /* err on non exist */, true /* is replay */); } } if (role != null) { role.addUser(userIdent); } propertyMgr.addUserResource(userIdent.getQualifiedUser(), false /* not system user */); if (!userIdent.getQualifiedUser().equals(ROOT_USER) && !userIdent.getQualifiedUser().equals(ADMIN_USER)) { TablePattern tblPattern = new TablePattern(InfoSchemaDb.DATABASE_NAME, "*"); try { tblPattern.analyze(ClusterNamespace.getClusterNameFromFullName(userIdent.getQualifiedUser())); } catch (AnalysisException e) { LOG.warn("should not happen", e); } grantInternal(userIdent, null /* role */, tblPattern, PrivBitSet.of(PaloPrivilege.SELECT_PRIV), false /* err on non exist */, true /* is replay */); } 
if (!isReplay) { PrivInfo privInfo = new PrivInfo(userIdent, null, password, roleName); Catalog.getCurrentCatalog().getEditLog().logCreateUser(privInfo); } LOG.info("finished to create user: {}, is replay: {}", userIdent, isReplay); } finally { writeUnlock(); } } public void dropUser(DropUserStmt stmt) throws DdlException { dropUserInternal(stmt.getUserIdentity(), false); } public void replayDropUser(UserIdentity userIdent) { dropUserInternal(userIdent, true); } public void replayOldDropUser(String userName) { UserIdentity userIdentity = new UserIdentity(userName, "%"); userIdentity.setIsAnalyzed(); dropUserInternal(userIdentity, true /* is replay */); } private void dropUserInternal(UserIdentity userIdent, boolean isReplay) { writeLock(); try { userPrivTable.dropUser(userIdent); dbPrivTable.dropUser(userIdent); tablePrivTable.dropUser(userIdent); resourcePrivTable.dropUser(userIdent); roleManager.dropUser(userIdent); if (!userPrivTable.doesUsernameExist(userIdent.getQualifiedUser())) { propertyMgr.dropUser(userIdent); } else if (userIdent.isDomain()) { propertyMgr.removeDomainFromUser(userIdent); } if (!isReplay) { Catalog.getCurrentCatalog().getEditLog().logNewDropUser(userIdent); } LOG.info("finished to drop user: {}, is replay: {}", userIdent.getQualifiedUser(), isReplay); } finally { writeUnlock(); } } public void grant(GrantStmt stmt) throws DdlException { PrivBitSet privs = PrivBitSet.of(stmt.getPrivileges()); if (stmt.getTblPattern() != null) { grantInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getTblPattern(), privs, true /* err on non exist */, false /* not replay */); } else { grantInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getResourcePattern(), privs, true /* err on non exist */, false /* not replay */); } } public void replayGrant(PrivInfo privInfo) { try { if (privInfo.getTblPattern() != null) { grantInternal(privInfo.getUserIdent(), privInfo.getRole(), privInfo.getTblPattern(), privInfo.getPrivs(), true /* err on non 
exist */, true /* is replay */); } else { grantInternal(privInfo.getUserIdent(), privInfo.getRole(), privInfo.getResourcePattern(), privInfo.getPrivs(), true /* err on non exist */, true /* is replay */); } } catch (DdlException e) { LOG.error("should not happen", e); } } private void grantInternal(UserIdentity userIdent, String role, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole newRole = new PaloRole(role, tblPattern, privs); PaloRole existingRole = roleManager.addRole(newRole, false /* err on exist */); for (UserIdentity user : existingRole.getUsers()) { for (Map.Entry<TablePattern, PrivBitSet> entry : existingRole.getTblPatternToPrivs().entrySet()) { grantPrivs(user, entry.getKey(), entry.getValue().copy(), errOnNonExist); } } } else { grantPrivs(userIdent, tblPattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, tblPattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logGrantPriv(info); } LOG.info("finished to grant privilege. 
is replay: {}", isReplay); } finally { writeUnlock(); } }
/* Grants privs on a resource pattern. If 'role' is given, the privs are merged into the role and
 * re-pushed to every user currently holding that role; otherwise they go directly to 'userIdent'.
 * Writes an edit-log entry unless this call is itself a replay. */
private void grantInternal(UserIdentity userIdent, String role, ResourcePattern resourcePattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole newRole = new PaloRole(role, resourcePattern, privs); PaloRole existingRole = roleManager.addRole(newRole, false /* err on exist */); for (UserIdentity user : existingRole.getUsers()) { for (Map.Entry<ResourcePattern, PrivBitSet> entry : existingRole.getResourcePatternToPrivs().entrySet()) { grantPrivs(user, entry.getKey(), entry.getValue().copy(), errOnNonExist); } } } else { grantPrivs(userIdent, resourcePattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, resourcePattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logGrantPriv(info); } LOG.info("finished to grant resource privilege. is replay: {}", isReplay); } finally { writeUnlock(); } }
/* Dispatches a table-pattern grant to the GLOBAL/DATABASE/TABLE priv table according to the
 * pattern's priv level. Throws if errOnNonExist is set and the user does not exist. */
public void grantPrivs(UserIdentity userIdent, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist) throws DdlException { LOG.debug("grant {} on {} to {}, err on non exist: {}", privs, tblPattern, userIdent, errOnNonExist); writeLock(); try { if (errOnNonExist && !doesUserExist(userIdent)) { throw new DdlException("user " + userIdent + " does not exist"); } switch (tblPattern.getPrivLevel()) { case GLOBAL: grantGlobalPrivs(userIdent, false /* err on exist */, errOnNonExist, privs); break; case DATABASE: grantDbPrivs(userIdent, tblPattern.getQualifiedDb(), false /* err on exist */, false /* err on non exist */, privs); break; case TABLE: grantTblPrivs(userIdent, tblPattern.getQualifiedDb(), tblPattern.getTbl(), false /* err on exist */, false /* err on non exist */, privs); break; default: Preconditions.checkNotNull(null, tblPattern.getPrivLevel()); } } finally { writeUnlock(); } }
/* Resource-pattern overload of grantPrivs; same dispatch idea at GLOBAL/RESOURCE level. */
public void grantPrivs(UserIdentity userIdent, ResourcePattern resourcePattern, PrivBitSet privs, 
boolean errOnNonExist) throws DdlException { LOG.debug("grant {} on resource {} to {}, err on non exist: {}", privs, resourcePattern, userIdent, errOnNonExist); writeLock(); try { if (errOnNonExist && !doesUserExist(userIdent)) { throw new DdlException("user " + userIdent + " does not exist"); } switch (resourcePattern.getPrivLevel()) { case GLOBAL: grantGlobalPrivs(userIdent, false, errOnNonExist, privs); break; case RESOURCE: grantResourcePrivs(userIdent, resourcePattern.getResourceName(), false, false, privs); break; default: Preconditions.checkNotNull(null, resourcePattern.getPrivLevel()); } } finally { writeUnlock(); } }
/* Domain-style identities are tracked by propertyMgr; plain identities by the user priv table. */
private boolean doesUserExist(UserIdentity userIdent) { if (userIdent.isDomain()) { return propertyMgr.doesUserExist(userIdent); } else { return userPrivTable.doesUserExist(userIdent); } }
/* REVOKE statement entry point: routes to the table-pattern or resource-pattern revokeInternal. */
public void revoke(RevokeStmt stmt) throws DdlException { PrivBitSet privs = PrivBitSet.of(stmt.getPrivileges()); if (stmt.getTblPattern() != null) { revokeInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getTblPattern(), privs, true /* err on non exist */, false /* is replay */); } else { revokeInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getResourcePattern(), privs, true /* err on non exist */, false /* is replay */); } }
/* Edit-log replay of a revoke; DdlException here indicates corrupted/inconsistent metadata. */
public void replayRevoke(PrivInfo info) { try { if (info.getTblPattern() != null) { revokeInternal(info.getUserIdent(), info.getRole(), info.getTblPattern(), info.getPrivs(), true /* err on non exist */, true /* is replay */); } else { revokeInternal(info.getUserIdent(), info.getRole(), info.getResourcePattern(), info.getPrivs(), true /* err on non exist */, true /* is replay */); } } catch (DdlException e) { LOG.error("should not happened", e); } }
/* Revokes table-pattern privs, either from a role (and then from all its users) or directly
 * from one user; logs an edit-log entry unless replaying. */
private void revokeInternal(UserIdentity userIdent, String role, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole existingRole = roleManager.revokePrivs(role, 
tblPattern, privs, errOnNonExist); if (existingRole != null) { for (UserIdentity user : existingRole.getUsers()) { revokePrivs(user, tblPattern, privs, false /* err on non exist */); } } } else { revokePrivs(userIdent, tblPattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, tblPattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logRevokePriv(info); } LOG.info("finished to revoke privilege. is replay: {}", isReplay); } finally { writeUnlock(); } }
/* Resource-pattern counterpart of the method above. */
private void revokeInternal(UserIdentity userIdent, String role, ResourcePattern resourcePattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole existingRole = roleManager.revokePrivs(role, resourcePattern, privs, errOnNonExist); if (existingRole != null) { for (UserIdentity user : existingRole.getUsers()) { revokePrivs(user, resourcePattern, privs, false /* err on non exist */); } } } else { revokePrivs(userIdent, resourcePattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, resourcePattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logRevokePriv(info); } LOG.info("finished to revoke privilege. 
is replay: {}", isReplay); } finally { writeUnlock(); } }
/* Dispatches a table-pattern revoke to the matching priv table by priv level. */
public void revokePrivs(UserIdentity userIdent, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist) throws DdlException { writeLock(); try { switch (tblPattern.getPrivLevel()) { case GLOBAL: revokeGlobalPrivs(userIdent, privs, errOnNonExist); break; case DATABASE: revokeDbPrivs(userIdent, tblPattern.getQualifiedDb(), privs, errOnNonExist); break; case TABLE: revokeTblPrivs(userIdent, tblPattern.getQualifiedDb(), tblPattern.getTbl(), privs, errOnNonExist); break; default: Preconditions.checkNotNull(null, tblPattern.getPrivLevel()); } } finally { writeUnlock(); } }
/* Resource-pattern revoke. NOTE(review): unlike the table-pattern overload above, this switch has
 * no default arm, so an unexpected priv level is silently ignored — confirm whether a
 * Preconditions.checkNotNull default was intended here as well. */
public void revokePrivs(UserIdentity userIdent, ResourcePattern resourcePattern, PrivBitSet privs, boolean errOnNonExist) throws DdlException { writeLock(); try { switch (resourcePattern.getPrivLevel()) { case GLOBAL: revokeGlobalPrivs(userIdent, privs, errOnNonExist); break; case RESOURCE: revokeResourcePrivs(userIdent, resourcePattern.getResourceName(), privs, errOnNonExist); break; } } finally { writeUnlock(); } }
/* SET PASSWORD statement entry point. */
public void setPassword(SetPassVar stmt) throws DdlException { setPasswordInternal(stmt.getUserIdent(), stmt.getPassword(), null, true /* err on non exist */, false /* set by resolver */, false); }
public void replaySetPassword(PrivInfo info) { try { setPasswordInternal(info.getUserIdent(), info.getPasswd(), null, true /* err on non exist */, false /* set by resolver */, true); } catch (DdlException e) { LOG.error("should not happened", e); } }
/* Stores a password either via propertyMgr (domain identities) or as a GlobalPrivEntry with an
 * empty priv set in the user priv table; 'setByResolver' implies a non-null domainUserIdent. */
public void setPasswordInternal(UserIdentity userIdent, byte[] password, UserIdentity domainUserIdent, boolean errOnNonExist, boolean setByResolver, boolean isReplay) throws DdlException { Preconditions.checkArgument(!setByResolver || domainUserIdent != null, setByResolver + ", " + domainUserIdent); writeLock(); try { if (userIdent.isDomain()) { propertyMgr.setPasswordForDomain(userIdent, password, true /* err on exist */, errOnNonExist /* err on non exist */); } else { 
GlobalPrivEntry passwdEntry; try { passwdEntry = GlobalPrivEntry.create(userIdent.getHost(), userIdent.getQualifiedUser(), userIdent.isDomain(), password, PrivBitSet.of()); passwdEntry.setSetByDomainResolver(setByResolver); if (setByResolver) { Preconditions.checkNotNull(domainUserIdent); passwdEntry.setDomainUserIdent(domainUserIdent); } } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } userPrivTable.setPassword(passwdEntry, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, null, password, null); Catalog.getCurrentCatalog().getEditLog().logSetPassword(info); } } finally { writeUnlock(); } LOG.info("finished to set password for {}. is replay: {}", userIdent, isReplay); }
/* CREATE ROLE: registers a role with an empty priv set; duplicate names are an error. */
public void createRole(CreateRoleStmt stmt) throws DdlException { createRoleInternal(stmt.getQualifiedRole(), false); }
public void replayCreateRole(PrivInfo info) { try { createRoleInternal(info.getRole(), true); } catch (DdlException e) { LOG.error("should not happened", e); } }
private void createRoleInternal(String role, boolean isReplay) throws DdlException { PaloRole emptyPrivsRole = new PaloRole(role); writeLock(); try { roleManager.addRole(emptyPrivsRole, true /* err on exist */); if (!isReplay) { PrivInfo info = new PrivInfo(null, null, null, role); Catalog.getCurrentCatalog().getEditLog().logCreateRole(info); } } finally { writeUnlock(); } LOG.info("finished to create role: {}, is replay: {}", role, isReplay); }
/* DROP ROLE and its replay path; missing role is an error on the non-replay path too. */
public void dropRole(DropRoleStmt stmt) throws DdlException { dropRoleInternal(stmt.getQualifiedRole(), false); }
public void replayDropRole(PrivInfo info) { try { dropRoleInternal(info.getRole(), true); } catch (DdlException e) { LOG.error("should not happened", e); } }
private void dropRoleInternal(String role, boolean isReplay) throws DdlException { writeLock(); try { roleManager.dropRole(role, true /* err on non exist */); if (!isReplay) { PrivInfo info = new PrivInfo(null, null, null, role); 
Catalog.getCurrentCatalog().getEditLog().logDropRole(info); } } finally { writeUnlock(); } LOG.info("finished to drop role: {}, is replay: {}", role, isReplay); }
/* SET PROPERTY statement entry point: forwards the key/value pairs to propertyMgr. */
public void updateUserProperty(SetUserPropertyStmt stmt) throws DdlException { List<Pair<String, String>> properties = stmt.getPropertyPairList(); updateUserPropertyInternal(stmt.getUser(), properties, false /* is replay */); }
public void replayUpdateUserProperty(UserPropertyInfo propInfo) throws DdlException { updateUserPropertyInternal(propInfo.getUser(), propInfo.getProperties(), true /* is replay */); }
public void updateUserPropertyInternal(String user, List<Pair<String, String>> properties, boolean isReplay) throws DdlException { writeLock(); try { propertyMgr.updateUserProperty(user, properties); if (!isReplay) { UserPropertyInfo propertyInfo = new UserPropertyInfo(user, properties); Catalog.getCurrentCatalog().getEditLog().logUpdateUserProperty(propertyInfo); } LOG.info("finished to set properties for user: {}", user); } finally { writeUnlock(); } }
/* Per-user connection limit, read under the shared lock. */
public long getMaxConn(String qualifiedUser) { readLock(); try { return propertyMgr.getMaxConn(qualifiedUser); } finally { readUnlock(); } }
public void getAllDomains(Set<String> allDomains) { readLock(); try { propertyMgr.getAllDomains(allDomains); } finally { readUnlock(); } }
/* Rebuilds resolver-generated priv entries from freshly resolved domain IPs.
 * NOTE(review): method name misspells "Resolved" as "Resovled"; renaming would break callers,
 * so it is only flagged here. */
public void refreshUserPrivEntriesByResovledIPs(Map<String, Set<String>> resolvedIPsMap) { writeLock(); try { userPrivTable.clearEntriesSetByResolver(); propertyMgr.addUserPrivEntriesByResolvedIPs(resolvedIPsMap); } finally { writeUnlock(); } }
/* SHOW GRANTS support: one row per user identity, or one row for the specified identity. */
public List<List<String>> getAuthInfo(UserIdentity specifiedUserIdent) { List<List<String>> userAuthInfos = Lists.newArrayList(); readLock(); try { if (specifiedUserIdent == null) { Set<UserIdentity> userIdents = getAllUserIdents(false /* include entry set by resolver */); for (UserIdentity userIdent : userIdents) { getUserAuthInfo(userAuthInfos, userIdent); } } else { getUserAuthInfo(userAuthInfos, specifiedUserIdent); } } finally { 
readUnlock(); } return userAuthInfos; }
/* Builds one SHOW GRANTS row for a single identity: identity, has-password flag, then the
 * global / db / table / resource priv strings (FeConstants.null_string when empty).
 * NOTE(review): the domain branch renders doesUserHasPassword(...) as "No" when true — this
 * polarity looks inverted; verify the semantics of doesUserHasPassword before relying on it. */
private void getUserAuthInfo(List<List<String>> userAuthInfos, UserIdentity userIdent) { List<String> userAuthInfo = Lists.newArrayList(); for (PrivEntry entry : userPrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } GlobalPrivEntry gEntry = (GlobalPrivEntry) entry; userAuthInfo.add(userIdent.toString()); if (userIdent.isDomain()) { userAuthInfo.add(propertyMgr.doesUserHasPassword(userIdent) ? "No" : "Yes"); } else { userAuthInfo.add((gEntry.getPassword() == null || gEntry.getPassword().length == 0) ? "No" : "Yes"); } userAuthInfo.add(gEntry.getPrivSet().toString() + " (" + gEntry.isSetByDomainResolver() + ")"); break; } if (userAuthInfo.isEmpty()) { if (!userIdent.isDomain()) { LOG.warn("user identity does not have global priv entry: {}", userIdent); userAuthInfo.add(userIdent.toString()); userAuthInfo.add(FeConstants.null_string); userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(userIdent.toString()); userAuthInfo.add(propertyMgr.doesUserHasPassword(userIdent) ? "No" : "Yes"); userAuthInfo.add(FeConstants.null_string); } } List<String> dbPrivs = Lists.newArrayList(); for (PrivEntry entry : dbPrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } DbPrivEntry dEntry = (DbPrivEntry) entry; dbPrivs.add(dEntry.getOrigDb() + ": " + dEntry.getPrivSet().toString() + " (" + entry.isSetByDomainResolver() + ")"); } if (dbPrivs.isEmpty()) { userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(Joiner.on("; ").join(dbPrivs)); } List<String> tblPrivs = Lists.newArrayList(); for (PrivEntry entry : tablePrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } TablePrivEntry tEntry = (TablePrivEntry) entry; tblPrivs.add(tEntry.getOrigDb() + "." 
+ tEntry.getOrigTbl() + ": " + tEntry.getPrivSet().toString() + " (" + entry.isSetByDomainResolver() + ")"); } if (tblPrivs.isEmpty()) { userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(Joiner.on("; ").join(tblPrivs)); } List<String> resourcePrivs = Lists.newArrayList(); for (PrivEntry entry : resourcePrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } ResourcePrivEntry rEntry = (ResourcePrivEntry) entry; resourcePrivs.add(rEntry.getOrigResource() + ": " + rEntry.getPrivSet().toString() + " (" + entry.isSetByDomainResolver() + ")"); } if (resourcePrivs.isEmpty()) { userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(Joiner.on("; ").join(resourcePrivs)); } userAuthInfos.add(userAuthInfo); }
/* Collects the distinct identities present in any of the four priv tables, optionally
 * excluding entries that were generated by the domain resolver. */
private Set<UserIdentity> getAllUserIdents(boolean includeEntrySetByResolver) { Set<UserIdentity> userIdents = Sets.newHashSet(); for (PrivEntry entry : userPrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } for (PrivEntry entry : dbPrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } for (PrivEntry entry : tablePrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } for (PrivEntry entry : resourcePrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } return userIdents; }
/* Returns the user's properties; an AnalysisException is mapped to an empty result. */
public List<List<String>> getUserProperties(String qualifiedUser) { readLock(); try { return propertyMgr.fetchUserProperty(qualifiedUser); } catch (AnalysisException e) { return Lists.newArrayList(); } finally { readUnlock(); } }
/* Drops every identity whose qualified user name starts with the given cluster prefix. */
public void dropUserOfCluster(String clusterName, boolean isReplay) { writeLock(); try { Set<UserIdentity> allUserIdents = getAllUserIdents(true); for (UserIdentity userIdent 
: allUserIdents) { if (userIdent.getQualifiedUser().startsWith(clusterName)) { dropUserInternal(userIdent, isReplay); } } } finally { writeUnlock(); } }
/* Looks up the load-cluster config for a user via propertyMgr. */
public Pair<String, DppConfig> getLoadClusterInfo(String qualifiedUser, String cluster) throws DdlException { readLock(); try { return propertyMgr.getLoadClusterInfo(qualifiedUser, cluster); } finally { readUnlock(); } }
/* A user may enter a cluster if it holds global ALL, or any db/table priv inside the cluster. */
public boolean checkCanEnterCluster(ConnectContext ctx, String clusterName) { readLock(); try { if (checkGlobalPriv(ctx, PrivPredicate.ALL)) { return true; } if (dbPrivTable.hasClusterPriv(ctx, clusterName)) { return true; } if (tablePrivTable.hasClusterPriv(ctx, clusterName)) { return true; } return false; } finally { readUnlock(); } }
/* Bootstraps the built-in root (operator role) and admin (admin role) accounts with empty
 * passwords; invoked from the constructor and after deserializing an empty priv table. */
private void initUser() { try { UserIdentity rootUser = new UserIdentity(ROOT_USER, "%"); rootUser.setIsAnalyzed(); createUserInternal(rootUser, PaloRole.OPERATOR_ROLE, new byte[0], true /* is replay */); UserIdentity adminUser = new UserIdentity(ADMIN_USER, "%"); adminUser.setIsAnalyzed(); createUserInternal(adminUser, PaloRole.ADMIN_ROLE, new byte[0], true /* is replay */); } catch (DdlException e) { LOG.error("should not happened", e); } }
public TFetchResourceResult toResourceThrift() { readLock(); try { return propertyMgr.toResourceThrift(); } finally { readUnlock(); } }
public List<List<String>> getRoleInfo() { readLock(); try { List<List<String>> results = Lists.newArrayList(); roleManager.getRoleInfo(results); return results; } finally { readUnlock(); } }
/* MySQL-compatible privilege names indexed by PaloPrivilege ordinal; empty slots are privs
 * (like GRANT/NODE/ADMIN) that have no direct MySQL counterpart. */
private final String[] privilegesInMysql = new String[]{"", "", "", "SELECT", "INSERT", "ALTER", "CREATE", "DROP", "USAGE"};
/* Fills information_schema.SCHEMA_PRIVILEGES-style rows for databases visible to currentUser. */
public void getSchemaPrivStatus(List<TPrivilegeStatus> dbPrivResult, UserIdentity currentUser) { readLock(); try { for (PrivEntry entry : dbPrivTable.getEntries()) { DbPrivEntry dbPrivEntry = (DbPrivEntry) entry; String origDb = dbPrivEntry.getOrigDb(); String dbName = ClusterNamespace.getNameFromFullName(dbPrivEntry.getOrigDb()); if (dbName.equals("information_schema" /* Don't show 
privileges in information_schema */) || !checkDbPriv(currentUser, origDb, PrivPredicate.SHOW)) { continue; } String grantee = new String("\'").concat(ClusterNamespace.getNameFromFullName(dbPrivEntry.getOrigUser())) .concat("\'@\'").concat(dbPrivEntry.getOrigHost()).concat("\'"); String isGrantable = dbPrivEntry.getPrivSet().get(2) ? "yes" : "no"; for (PaloPrivilege paloPriv : dbPrivEntry.getPrivSet().toPrivilegeList()) { if (paloPriv == PaloPrivilege.GRANT_PRIV) { continue; } TPrivilegeStatus status = new TPrivilegeStatus(); status.setPrivilegeType(privilegesInMysql[paloPriv.getIdx()]); status.setGrantee(grantee); status.setSchema(dbName); status.setIsGrantable(isGrantable); dbPrivResult.add(status); } } } finally { readUnlock(); } }
/* Fills information_schema.USER_PRIVILEGES-style rows; ADMIN_PRIV expands to every concrete
 * MySQL privilege name, while GRANT/NODE privs are skipped.
 * NOTE(review): getPrivSet().get(2) presumably tests GRANT_PRIV by bit index — confirm. */
public void getGlobalPrivStatus(List<TPrivilegeStatus> userPrivResult, UserIdentity currentUser) { readLock(); try { if (!checkGlobalPriv(currentUser, PrivPredicate.SHOW)) { return; } for (PrivEntry userPrivEntry : userPrivTable.getEntries()) { String grantee = new String("\'").concat(ClusterNamespace.getNameFromFullName(userPrivEntry.getOrigUser())) .concat("\'@\'").concat(userPrivEntry.getOrigHost()).concat("\'"); String isGrantable = userPrivEntry.getPrivSet().get(2) ? 
"yes" : "no"; for (PaloPrivilege paloPriv : userPrivEntry.getPrivSet().toPrivilegeList()) { if (paloPriv == PaloPrivilege.GRANT_PRIV || paloPriv == PaloPrivilege.NODE_PRIV /* Don't show NODE_PRIV, which doesn't exist in mysql */) { continue; } if (paloPriv == PaloPrivilege.ADMIN_PRIV) { for (String priv : privilegesInMysql) { if(!priv.isEmpty()){ TPrivilegeStatus status = new TPrivilegeStatus(); status.setPrivilegeType(priv); status.setGrantee(grantee); status.setIsGrantable("yes"); userPrivResult.add(status); } } break; } TPrivilegeStatus status = new TPrivilegeStatus(); status.setPrivilegeType(paloPriv.toString()); status.setGrantee(grantee); status.setIsGrantable(isGrantable); userPrivResult.add(status); } } } finally { readUnlock(); } }
/* Deserialization factory used when loading the FE image. */
public static PaloAuth read(DataInput in) throws IOException { PaloAuth auth = new PaloAuth(); auth.readFields(in); return auth; }
@Override public void write(DataOutput out) throws IOException { roleManager.write(out); userPrivTable.write(out); dbPrivTable.write(out); tablePrivTable.write(out); resourcePrivTable.write(out); propertyMgr.write(out); }
/* Must mirror write(); the resource priv table only exists from meta version 87 onward. */
public void readFields(DataInput in) throws IOException { roleManager = RoleManager.read(in); userPrivTable = (UserPrivTable) PrivTable.read(in); dbPrivTable = (DbPrivTable) PrivTable.read(in); tablePrivTable = (TablePrivTable) PrivTable.read(in); if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_87) { resourcePrivTable = (ResourcePrivTable) PrivTable.read(in); } propertyMgr = UserPropertyMgr.read(in); if (userPrivTable.isEmpty()) { initUser(); } }
@Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(userPrivTable).append("\n"); sb.append(dbPrivTable).append("\n"); sb.append(tablePrivTable).append("\n"); sb.append(resourcePrivTable).append("\n"); sb.append(roleManager).append("\n"); sb.append(propertyMgr).append("\n"); return sb.toString(); } }
/* Central privilege/authentication manager: four priv tables (global/db/table/resource),
 * role definitions, and per-user properties, all guarded by one read-write lock.
 * NOTE(review): a complete copy of this class also appears to close just above this line —
 * confirm whether this file intentionally contains the class twice. */
class PaloAuth implements Writable { private static final Logger LOG = LogManager.getLogger(PaloAuth.class); public static final String ROOT_USER = "root"; public static final String ADMIN_USER = "admin"; private UserPrivTable userPrivTable = new UserPrivTable(); private DbPrivTable dbPrivTable = new DbPrivTable(); private TablePrivTable tablePrivTable = new TablePrivTable(); private ResourcePrivTable resourcePrivTable = new ResourcePrivTable(); private RoleManager roleManager = new RoleManager();; /* NOTE(review): stray second ';' — harmless empty declaration, drop when logic is next touched */ private UserPropertyMgr propertyMgr = new UserPropertyMgr(); private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private void readLock() { lock.readLock().lock(); } private void readUnlock() { lock.readLock().unlock(); } private void writeLock() { lock.writeLock().lock(); } private void writeUnlock() { lock.writeLock().unlock(); } public enum PrivLevel { GLOBAL, DATABASE, TABLE, RESOURCE } public PaloAuth() { initUser(); } public UserPrivTable getUserPrivTable() { return userPrivTable; } public DbPrivTable getDbPrivTable() { return dbPrivTable; } public TablePrivTable getTablePrivTable() { return tablePrivTable; }
/* Adds a global priv entry for the identity; errOnExist and errOnNonExist are mutually
 * exclusive. The empty byte[] password is a placeholder (passwords are set separately). */
private GlobalPrivEntry grantGlobalPrivs(UserIdentity userIdentity, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { if (errOnExist && errOnNonExist) { throw new DdlException("Can only specified errOnExist or errOnNonExist"); } GlobalPrivEntry entry; try { entry = GlobalPrivEntry.create(userIdentity.getHost(), userIdentity.getQualifiedUser(), userIdentity.isDomain(), new byte[0] /* no use */, privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } userPrivTable.addEntry(entry, errOnExist, errOnNonExist); return entry; }
/* Removes global privs; the entry is kept even when its priv set becomes empty because it
 * also stores the user's password. */
private void revokeGlobalPrivs(UserIdentity userIdentity, PrivBitSet privs, boolean errOnNonExist) throws DdlException { GlobalPrivEntry entry; try { entry = GlobalPrivEntry.create(userIdentity.getHost(), 
userIdentity.getQualifiedUser(), userIdentity.isDomain(), new byte[0] /* no use */, privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } userPrivTable.revoke(entry, errOnNonExist, false /* not delete entry if priv is empty, because global priv entry has password */); }
/* Database-level grant/revoke pair; empty entries ARE deleted here, unlike the global table. */
private void grantDbPrivs(UserIdentity userIdentity, String db, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { DbPrivEntry entry; try { entry = DbPrivEntry.create(userIdentity.getHost(), db, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } dbPrivTable.addEntry(entry, errOnExist, errOnNonExist); }
private void revokeDbPrivs(UserIdentity userIdentity, String db, PrivBitSet privs, boolean errOnNonExist) throws DdlException { DbPrivEntry entry; try { entry = DbPrivEntry.create(userIdentity.getHost(), db, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } dbPrivTable.revoke(entry, errOnNonExist, true /* delete entry when empty */); }
/* Table-level grant/revoke pair, same pattern as the db-level helpers. */
private void grantTblPrivs(UserIdentity userIdentity, String db, String tbl, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { TablePrivEntry entry; try { entry = TablePrivEntry.create(userIdentity.getHost(), db, userIdentity.getQualifiedUser(), tbl, userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } tablePrivTable.addEntry(entry, errOnExist, errOnNonExist); }
private void revokeTblPrivs(UserIdentity userIdentity, String db, String tbl, PrivBitSet privs, boolean errOnNonExist) throws DdlException { TablePrivEntry entry; try { entry = TablePrivEntry.create(userIdentity.getHost(), db, 
userIdentity.getQualifiedUser(), tbl, userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } tablePrivTable.revoke(entry, errOnNonExist, true /* delete entry when empty */); }
/* Resource-level grant/revoke pair. */
private void grantResourcePrivs(UserIdentity userIdentity, String resourceName, boolean errOnExist, boolean errOnNonExist, PrivBitSet privs) throws DdlException { ResourcePrivEntry entry; try { entry = ResourcePrivEntry.create(userIdentity.getHost(), resourceName, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } resourcePrivTable.addEntry(entry, errOnExist, errOnNonExist); }
private void revokeResourcePrivs(UserIdentity userIdentity, String resourceName, PrivBitSet privs, boolean errOnNonExist) throws DdlException { ResourcePrivEntry entry; try { entry = ResourcePrivEntry.create(userIdentity.getHost(), resourceName, userIdentity.getQualifiedUser(), userIdentity.isDomain(), privs); entry.setSetByDomainResolver(false); } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } resourcePrivTable.revoke(entry, errOnNonExist, true /* delete entry when empty */); } /* * check password, if matched, save the userIdentity in matched entry. * the following auth checking should use userIdentity saved in currentUser. 
*/
/* Authenticates a remote login (MySQL challenge-response). Local root/admin from 127.0.0.1
 * bypass the password check entirely; matched identities are appended to 'currentUser'. */
public boolean checkPassword(String remoteUser, String remoteHost, byte[] remotePasswd, byte[] randomString, List<UserIdentity> currentUser) { if (!Config.enable_auth_check) { return true; } if ((remoteUser.equals(ROOT_USER) || remoteUser.equals(ADMIN_USER)) && remoteHost.equals("127.0.0.1")) { if (remoteUser.equals(ROOT_USER)) { currentUser.add(UserIdentity.ROOT); } else { currentUser.add(UserIdentity.ADMIN); } return true; } readLock(); try { return userPrivTable.checkPassword(remoteUser, remoteHost, remotePasswd, randomString, currentUser); } finally { readUnlock(); } }
/* Plaintext-password variant (e.g. for HTTP basic auth); no localhost shortcut here. */
public boolean checkPlainPassword(String remoteUser, String remoteHost, String remotePasswd, List<UserIdentity> currentUser) { if (!Config.enable_auth_check) { return true; } readLock(); try { return userPrivTable.checkPlainPassword(remoteUser, remoteHost, remotePasswd, currentUser); } finally { readUnlock(); } }
public boolean checkGlobalPriv(ConnectContext ctx, PrivPredicate wanted) { return checkGlobalPriv(ctx.getCurrentUserIdentity(), wanted); }
/* All check*Priv methods short-circuit to true when auth checking is globally disabled. */
public boolean checkGlobalPriv(UserIdentity currentUser, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; }
public boolean checkDbPriv(ConnectContext ctx, String qualifiedDb, PrivPredicate wanted) { return checkDbPriv(ctx.getCurrentUserIdentity(), qualifiedDb, wanted); } /* * Check if 'user'@'host' on 'db' has 'wanted' priv. * If the given db is null, which means it will no check if database name is matched. */ public boolean checkDbPriv(UserIdentity currentUser, String db, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } if (wanted.getPrivs().containsNodePriv()) { LOG.debug("should not check NODE priv in Database level. 
user: {}, db: {}", currentUser, db); return false; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs) || checkDbInternal(currentUser, db, wanted, savedPrivs)) { return true; } if (db != null && wanted == PrivPredicate.SHOW && checkTblWithDb(currentUser, db)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; } /* * User may not have privs on a database, but have privs of tables in this database. * So we have to check if user has any privs of tables in this database. * if so, the database should be visible to this user. */ private boolean checkTblWithDb(UserIdentity currentUser, String db) { readLock(); try { return tablePrivTable.hasPrivsOfDb(currentUser, db); } finally { readUnlock(); } }
public boolean checkTblPriv(ConnectContext ctx, String qualifiedDb, String tbl, PrivPredicate wanted) { return checkTblPriv(ctx.getCurrentUserIdentity(), qualifiedDb, tbl, wanted); }
/* Table-level check: cascades global -> db -> table until the predicate is satisfied. */
public boolean checkTblPriv(UserIdentity currentUser, String db, String tbl, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } if (wanted.getPrivs().containsNodePriv()) { LOG.debug("should check NODE priv in GLOBAL level. 
user: {}, db: {}, tbl: {}", currentUser, db, tbl); return false; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs) || checkDbInternal(currentUser, db, wanted, savedPrivs) || checkTblInternal(currentUser, db, tbl, wanted, savedPrivs)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; }
public boolean checkResourcePriv(ConnectContext ctx, String resourceName, PrivPredicate wanted) { return checkResourcePriv(ctx.getCurrentUserIdentity(), resourceName, wanted); }
public boolean checkResourcePriv(UserIdentity currentUser, String resourceName, PrivPredicate wanted) { if (!Config.enable_auth_check) { return true; } PrivBitSet savedPrivs = PrivBitSet.of(); if (checkGlobalInternal(currentUser, wanted, savedPrivs) || checkResourceInternal(currentUser, resourceName, wanted, savedPrivs)) { return true; } LOG.debug("failed to get wanted privs: {}, granted: {}", wanted, savedPrivs); return false; }
/* Checks the predicate against a load job's AuthorizationInfo: db-level when no tables are
 * listed, otherwise every listed table must pass. */
public boolean checkPrivByAuthInfo(ConnectContext ctx, AuthorizationInfo authInfo, PrivPredicate wanted) { if (authInfo == null) { return false; } if (authInfo.getDbName() == null) { return false; } if (authInfo.getTableNameList() == null || authInfo.getTableNameList().isEmpty()) { return checkDbPriv(ctx, authInfo.getDbName(), wanted); } for (String tblName : authInfo.getTableNameList()) { if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), authInfo.getDbName(), tblName, wanted)) { return false; } } return true; } /* * Check if current user has certain privilege. * This method will check the given privilege levels */ public boolean checkHasPriv(ConnectContext ctx, PrivPredicate priv, PrivLevel... levels) { return checkHasPrivInternal(ctx.getRemoteIP(), ctx.getQualifiedUser(), priv, levels); }
/* NOTE(review): RESOURCE level is not handled here (falls into default) — confirm intended. */
private boolean checkHasPrivInternal(String host, String user, PrivPredicate priv, PrivLevel... 
levels) { for (PrivLevel privLevel : levels) { switch (privLevel) { case GLOBAL: if (userPrivTable.hasPriv(host, user, priv)) { return true; } break; case DATABASE: if (dbPrivTable.hasPriv(host, user, priv)) { return true; } break; case TABLE: if (tablePrivTable.hasPriv(host, user, priv)) { return true; } break; default: break; } } return false; }
/* The four *Internal helpers accumulate granted bits into savedPrivs (for diagnostics) and
 * test them against the wanted predicate under the read lock. */
private boolean checkGlobalInternal(UserIdentity currentUser, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { userPrivTable.getPrivs(currentUser, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } return false; } finally { readUnlock(); } }
private boolean checkDbInternal(UserIdentity currentUser, String db, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { dbPrivTable.getPrivs(currentUser, db, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } } finally { readUnlock(); } return false; }
private boolean checkTblInternal(UserIdentity currentUser, String db, String tbl, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { tablePrivTable.getPrivs(currentUser, db, tbl, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } return false; } finally { readUnlock(); } }
private boolean checkResourceInternal(UserIdentity currentUser, String resourceName, PrivPredicate wanted, PrivBitSet savedPrivs) { readLock(); try { resourcePrivTable.getPrivs(currentUser, resourceName, savedPrivs); if (PaloPrivilege.satisfy(savedPrivs, wanted)) { return true; } return false; } finally { readUnlock(); } }
/* NOTE(review): clears the four priv tables without taking the write lock, unlike every other
 * mutator in this class — verify callers hold the lock or are single-threaded. */
public void clear() { userPrivTable.clear(); dbPrivTable.clear(); tablePrivTable.clear(); resourcePrivTable.clear(); }
/* CREATE USER statement entry point and its replay path. */
public void createUser(CreateUserStmt stmt) throws DdlException { createUserInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getPassword(), false); }
public void replayCreateUser(PrivInfo privInfo) { try { createUserInternal(privInfo.getUserIdent(), privInfo.getRole(), privInfo.getPasswd(), true); } 
catch (DdlException e) { LOG.error("should not happen", e); } } /* * Do following steps: * 1. Check does specified role exist. If not, throw exception. * 2. Check does user already exist. If yes, throw exception. * 3. set password for specified user. * 4. grant privs of role to user, if role is specified. */ private void createUserInternal(UserIdentity userIdent, String roleName, byte[] password, boolean isReplay) throws DdlException { writeLock(); try { PaloRole role = null; if (roleName != null) { role = roleManager.getRole(roleName); if (role == null) { throw new DdlException("Role: " + roleName + " does not exist"); } } if (userPrivTable.doesUserExist(userIdent)) { throw new DdlException("User " + userIdent + " already exist"); } setPasswordInternal(userIdent, password, null, false /* err on non exist */, false /* set by resolver */, true /* is replay */); if (role != null) { for (Map.Entry<TablePattern, PrivBitSet> entry : role.getTblPatternToPrivs().entrySet()) { grantInternal(userIdent, null /* role */, entry.getKey(), entry.getValue().copy(), false /* err on non exist */, true /* is replay */); } for (Map.Entry<ResourcePattern, PrivBitSet> entry : role.getResourcePatternToPrivs().entrySet()) { grantInternal(userIdent, null /* role */, entry.getKey(), entry.getValue().copy(), false /* err on non exist */, true /* is replay */); } } if (role != null) { role.addUser(userIdent); } propertyMgr.addUserResource(userIdent.getQualifiedUser(), false /* not system user */);
/* Every non-built-in user gets read access to information_schema in its own cluster. */
if (!userIdent.getQualifiedUser().equals(ROOT_USER) && !userIdent.getQualifiedUser().equals(ADMIN_USER)) { TablePattern tblPattern = new TablePattern(InfoSchemaDb.DATABASE_NAME, "*"); try { tblPattern.analyze(ClusterNamespace.getClusterNameFromFullName(userIdent.getQualifiedUser())); } catch (AnalysisException e) { LOG.warn("should not happen", e); } grantInternal(userIdent, null /* role */, tblPattern, PrivBitSet.of(PaloPrivilege.SELECT_PRIV), false /* err on non exist */, true /* is replay */); } 
if (!isReplay) { PrivInfo privInfo = new PrivInfo(userIdent, null, password, roleName); Catalog.getCurrentCatalog().getEditLog().logCreateUser(privInfo); } LOG.info("finished to create user: {}, is replay: {}", userIdent, isReplay); } finally { writeUnlock(); } }
/* DROP USER entry point plus new- and old-format replay paths. */
public void dropUser(DropUserStmt stmt) throws DdlException { dropUserInternal(stmt.getUserIdentity(), false); }
public void replayDropUser(UserIdentity userIdent) { dropUserInternal(userIdent, true); }
public void replayOldDropUser(String userName) { UserIdentity userIdentity = new UserIdentity(userName, "%"); userIdentity.setIsAnalyzed(); dropUserInternal(userIdentity, true /* is replay */); }
/* Removes the identity from all priv tables and roles; user properties are dropped only when
 * no other identity shares the same user name. */
private void dropUserInternal(UserIdentity userIdent, boolean isReplay) { writeLock(); try { userPrivTable.dropUser(userIdent); dbPrivTable.dropUser(userIdent); tablePrivTable.dropUser(userIdent); resourcePrivTable.dropUser(userIdent); roleManager.dropUser(userIdent); if (!userPrivTable.doesUsernameExist(userIdent.getQualifiedUser())) { propertyMgr.dropUser(userIdent); } else if (userIdent.isDomain()) { propertyMgr.removeDomainFromUser(userIdent); } if (!isReplay) { Catalog.getCurrentCatalog().getEditLog().logNewDropUser(userIdent); } LOG.info("finished to drop user: {}, is replay: {}", userIdent.getQualifiedUser(), isReplay); } finally { writeUnlock(); } }
/* GRANT statement entry point: routes to the table-pattern or resource-pattern grantInternal. */
public void grant(GrantStmt stmt) throws DdlException { PrivBitSet privs = PrivBitSet.of(stmt.getPrivileges()); if (stmt.getTblPattern() != null) { grantInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getTblPattern(), privs, true /* err on non exist */, false /* not replay */); } else { grantInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getResourcePattern(), privs, true /* err on non exist */, false /* not replay */); } }
public void replayGrant(PrivInfo privInfo) { try { if (privInfo.getTblPattern() != null) { grantInternal(privInfo.getUserIdent(), privInfo.getRole(), privInfo.getTblPattern(), privInfo.getPrivs(), true /* err on non 
exist */, true /* is replay */); } else { grantInternal(privInfo.getUserIdent(), privInfo.getRole(), privInfo.getResourcePattern(), privInfo.getPrivs(), true /* err on non exist */, true /* is replay */); } } catch (DdlException e) { LOG.error("should not happen", e); } }
/* Table-pattern grant: merges into the role and re-pushes to its users, or grants directly;
 * logs an edit-log entry unless replaying. */
private void grantInternal(UserIdentity userIdent, String role, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole newRole = new PaloRole(role, tblPattern, privs); PaloRole existingRole = roleManager.addRole(newRole, false /* err on exist */); for (UserIdentity user : existingRole.getUsers()) { for (Map.Entry<TablePattern, PrivBitSet> entry : existingRole.getTblPatternToPrivs().entrySet()) { grantPrivs(user, entry.getKey(), entry.getValue().copy(), errOnNonExist); } } } else { grantPrivs(userIdent, tblPattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, tblPattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logGrantPriv(info); } LOG.info("finished to grant privilege. 
is replay: {}", isReplay); } finally { writeUnlock(); } }
/* Resource-pattern grantInternal, mirroring the table-pattern version above. */
private void grantInternal(UserIdentity userIdent, String role, ResourcePattern resourcePattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole newRole = new PaloRole(role, resourcePattern, privs); PaloRole existingRole = roleManager.addRole(newRole, false /* err on exist */); for (UserIdentity user : existingRole.getUsers()) { for (Map.Entry<ResourcePattern, PrivBitSet> entry : existingRole.getResourcePatternToPrivs().entrySet()) { grantPrivs(user, entry.getKey(), entry.getValue().copy(), errOnNonExist); } } } else { grantPrivs(userIdent, resourcePattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, resourcePattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logGrantPriv(info); } LOG.info("finished to grant resource privilege. is replay: {}", isReplay); } finally { writeUnlock(); } }
/* Dispatches a table-pattern grant to the GLOBAL/DATABASE/TABLE priv table by priv level. */
public void grantPrivs(UserIdentity userIdent, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist) throws DdlException { LOG.debug("grant {} on {} to {}, err on non exist: {}", privs, tblPattern, userIdent, errOnNonExist); writeLock(); try { if (errOnNonExist && !doesUserExist(userIdent)) { throw new DdlException("user " + userIdent + " does not exist"); } switch (tblPattern.getPrivLevel()) { case GLOBAL: grantGlobalPrivs(userIdent, false /* err on exist */, errOnNonExist, privs); break; case DATABASE: grantDbPrivs(userIdent, tblPattern.getQualifiedDb(), false /* err on exist */, false /* err on non exist */, privs); break; case TABLE: grantTblPrivs(userIdent, tblPattern.getQualifiedDb(), tblPattern.getTbl(), false /* err on exist */, false /* err on non exist */, privs); break; default: Preconditions.checkNotNull(null, tblPattern.getPrivLevel()); } } finally { writeUnlock(); } } public void grantPrivs(UserIdentity userIdent, ResourcePattern resourcePattern, PrivBitSet privs, 
boolean errOnNonExist) throws DdlException { LOG.debug("grant {} on resource {} to {}, err on non exist: {}", privs, resourcePattern, userIdent, errOnNonExist); writeLock(); try { if (errOnNonExist && !doesUserExist(userIdent)) { throw new DdlException("user " + userIdent + " does not exist"); } switch (resourcePattern.getPrivLevel()) { case GLOBAL: grantGlobalPrivs(userIdent, false, errOnNonExist, privs); break; case RESOURCE: grantResourcePrivs(userIdent, resourcePattern.getResourceName(), false, false, privs); break; default: Preconditions.checkNotNull(null, resourcePattern.getPrivLevel()); } } finally { writeUnlock(); } } private boolean doesUserExist(UserIdentity userIdent) { if (userIdent.isDomain()) { return propertyMgr.doesUserExist(userIdent); } else { return userPrivTable.doesUserExist(userIdent); } } public void revoke(RevokeStmt stmt) throws DdlException { PrivBitSet privs = PrivBitSet.of(stmt.getPrivileges()); if (stmt.getTblPattern() != null) { revokeInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getTblPattern(), privs, true /* err on non exist */, false /* is replay */); } else { revokeInternal(stmt.getUserIdent(), stmt.getQualifiedRole(), stmt.getResourcePattern(), privs, true /* err on non exist */, false /* is replay */); } } public void replayRevoke(PrivInfo info) { try { if (info.getTblPattern() != null) { revokeInternal(info.getUserIdent(), info.getRole(), info.getTblPattern(), info.getPrivs(), true /* err on non exist */, true /* is replay */); } else { revokeInternal(info.getUserIdent(), info.getRole(), info.getResourcePattern(), info.getPrivs(), true /* err on non exist */, true /* is replay */); } } catch (DdlException e) { LOG.error("should not happened", e); } } private void revokeInternal(UserIdentity userIdent, String role, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole existingRole = roleManager.revokePrivs(role, 
tblPattern, privs, errOnNonExist); if (existingRole != null) { for (UserIdentity user : existingRole.getUsers()) { revokePrivs(user, tblPattern, privs, false /* err on non exist */); } } } else { revokePrivs(userIdent, tblPattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, tblPattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logRevokePriv(info); } LOG.info("finished to revoke privilege. is replay: {}", isReplay); } finally { writeUnlock(); } } private void revokeInternal(UserIdentity userIdent, String role, ResourcePattern resourcePattern, PrivBitSet privs, boolean errOnNonExist, boolean isReplay) throws DdlException { writeLock(); try { if (role != null) { PaloRole existingRole = roleManager.revokePrivs(role, resourcePattern, privs, errOnNonExist); if (existingRole != null) { for (UserIdentity user : existingRole.getUsers()) { revokePrivs(user, resourcePattern, privs, false /* err on non exist */); } } } else { revokePrivs(userIdent, resourcePattern, privs, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, resourcePattern, privs, null, role); Catalog.getCurrentCatalog().getEditLog().logRevokePriv(info); } LOG.info("finished to revoke privilege. 
is replay: {}", isReplay); } finally { writeUnlock(); } } public void revokePrivs(UserIdentity userIdent, TablePattern tblPattern, PrivBitSet privs, boolean errOnNonExist) throws DdlException { writeLock(); try { switch (tblPattern.getPrivLevel()) { case GLOBAL: revokeGlobalPrivs(userIdent, privs, errOnNonExist); break; case DATABASE: revokeDbPrivs(userIdent, tblPattern.getQualifiedDb(), privs, errOnNonExist); break; case TABLE: revokeTblPrivs(userIdent, tblPattern.getQualifiedDb(), tblPattern.getTbl(), privs, errOnNonExist); break; default: Preconditions.checkNotNull(null, tblPattern.getPrivLevel()); } } finally { writeUnlock(); } } public void revokePrivs(UserIdentity userIdent, ResourcePattern resourcePattern, PrivBitSet privs, boolean errOnNonExist) throws DdlException { writeLock(); try { switch (resourcePattern.getPrivLevel()) { case GLOBAL: revokeGlobalPrivs(userIdent, privs, errOnNonExist); break; case RESOURCE: revokeResourcePrivs(userIdent, resourcePattern.getResourceName(), privs, errOnNonExist); break; } } finally { writeUnlock(); } } public void setPassword(SetPassVar stmt) throws DdlException { setPasswordInternal(stmt.getUserIdent(), stmt.getPassword(), null, true /* err on non exist */, false /* set by resolver */, false); } public void replaySetPassword(PrivInfo info) { try { setPasswordInternal(info.getUserIdent(), info.getPasswd(), null, true /* err on non exist */, false /* set by resolver */, true); } catch (DdlException e) { LOG.error("should not happened", e); } } public void setPasswordInternal(UserIdentity userIdent, byte[] password, UserIdentity domainUserIdent, boolean errOnNonExist, boolean setByResolver, boolean isReplay) throws DdlException { Preconditions.checkArgument(!setByResolver || domainUserIdent != null, setByResolver + ", " + domainUserIdent); writeLock(); try { if (userIdent.isDomain()) { propertyMgr.setPasswordForDomain(userIdent, password, true /* err on exist */, errOnNonExist /* err on non exist */); } else { 
GlobalPrivEntry passwdEntry; try { passwdEntry = GlobalPrivEntry.create(userIdent.getHost(), userIdent.getQualifiedUser(), userIdent.isDomain(), password, PrivBitSet.of()); passwdEntry.setSetByDomainResolver(setByResolver); if (setByResolver) { Preconditions.checkNotNull(domainUserIdent); passwdEntry.setDomainUserIdent(domainUserIdent); } } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } userPrivTable.setPassword(passwdEntry, errOnNonExist); } if (!isReplay) { PrivInfo info = new PrivInfo(userIdent, null, password, null); Catalog.getCurrentCatalog().getEditLog().logSetPassword(info); } } finally { writeUnlock(); } LOG.info("finished to set password for {}. is replay: {}", userIdent, isReplay); } public void createRole(CreateRoleStmt stmt) throws DdlException { createRoleInternal(stmt.getQualifiedRole(), false); } public void replayCreateRole(PrivInfo info) { try { createRoleInternal(info.getRole(), true); } catch (DdlException e) { LOG.error("should not happened", e); } } private void createRoleInternal(String role, boolean isReplay) throws DdlException { PaloRole emptyPrivsRole = new PaloRole(role); writeLock(); try { roleManager.addRole(emptyPrivsRole, true /* err on exist */); if (!isReplay) { PrivInfo info = new PrivInfo(null, null, null, role); Catalog.getCurrentCatalog().getEditLog().logCreateRole(info); } } finally { writeUnlock(); } LOG.info("finished to create role: {}, is replay: {}", role, isReplay); } public void dropRole(DropRoleStmt stmt) throws DdlException { dropRoleInternal(stmt.getQualifiedRole(), false); } public void replayDropRole(PrivInfo info) { try { dropRoleInternal(info.getRole(), true); } catch (DdlException e) { LOG.error("should not happened", e); } } private void dropRoleInternal(String role, boolean isReplay) throws DdlException { writeLock(); try { roleManager.dropRole(role, true /* err on non exist */); if (!isReplay) { PrivInfo info = new PrivInfo(null, null, null, role); 
Catalog.getCurrentCatalog().getEditLog().logDropRole(info); } } finally { writeUnlock(); } LOG.info("finished to drop role: {}, is replay: {}", role, isReplay); } public void updateUserProperty(SetUserPropertyStmt stmt) throws DdlException { List<Pair<String, String>> properties = stmt.getPropertyPairList(); updateUserPropertyInternal(stmt.getUser(), properties, false /* is replay */); } public void replayUpdateUserProperty(UserPropertyInfo propInfo) throws DdlException { updateUserPropertyInternal(propInfo.getUser(), propInfo.getProperties(), true /* is replay */); } public void updateUserPropertyInternal(String user, List<Pair<String, String>> properties, boolean isReplay) throws DdlException { writeLock(); try { propertyMgr.updateUserProperty(user, properties); if (!isReplay) { UserPropertyInfo propertyInfo = new UserPropertyInfo(user, properties); Catalog.getCurrentCatalog().getEditLog().logUpdateUserProperty(propertyInfo); } LOG.info("finished to set properties for user: {}", user); } finally { writeUnlock(); } } public long getMaxConn(String qualifiedUser) { readLock(); try { return propertyMgr.getMaxConn(qualifiedUser); } finally { readUnlock(); } } public void getAllDomains(Set<String> allDomains) { readLock(); try { propertyMgr.getAllDomains(allDomains); } finally { readUnlock(); } } public void refreshUserPrivEntriesByResovledIPs(Map<String, Set<String>> resolvedIPsMap) { writeLock(); try { userPrivTable.clearEntriesSetByResolver(); propertyMgr.addUserPrivEntriesByResolvedIPs(resolvedIPsMap); } finally { writeUnlock(); } } public List<List<String>> getAuthInfo(UserIdentity specifiedUserIdent) { List<List<String>> userAuthInfos = Lists.newArrayList(); readLock(); try { if (specifiedUserIdent == null) { Set<UserIdentity> userIdents = getAllUserIdents(false /* include entry set by resolver */); for (UserIdentity userIdent : userIdents) { getUserAuthInfo(userAuthInfos, userIdent); } } else { getUserAuthInfo(userAuthInfos, specifiedUserIdent); } } finally { 
readUnlock(); } return userAuthInfos; } private void getUserAuthInfo(List<List<String>> userAuthInfos, UserIdentity userIdent) { List<String> userAuthInfo = Lists.newArrayList(); for (PrivEntry entry : userPrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } GlobalPrivEntry gEntry = (GlobalPrivEntry) entry; userAuthInfo.add(userIdent.toString()); if (userIdent.isDomain()) { userAuthInfo.add(propertyMgr.doesUserHasPassword(userIdent) ? "No" : "Yes"); } else { userAuthInfo.add((gEntry.getPassword() == null || gEntry.getPassword().length == 0) ? "No" : "Yes"); } userAuthInfo.add(gEntry.getPrivSet().toString() + " (" + gEntry.isSetByDomainResolver() + ")"); break; } if (userAuthInfo.isEmpty()) { if (!userIdent.isDomain()) { LOG.warn("user identity does not have global priv entry: {}", userIdent); userAuthInfo.add(userIdent.toString()); userAuthInfo.add(FeConstants.null_string); userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(userIdent.toString()); userAuthInfo.add(propertyMgr.doesUserHasPassword(userIdent) ? "No" : "Yes"); userAuthInfo.add(FeConstants.null_string); } } List<String> dbPrivs = Lists.newArrayList(); for (PrivEntry entry : dbPrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } DbPrivEntry dEntry = (DbPrivEntry) entry; dbPrivs.add(dEntry.getOrigDb() + ": " + dEntry.getPrivSet().toString() + " (" + entry.isSetByDomainResolver() + ")"); } if (dbPrivs.isEmpty()) { userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(Joiner.on("; ").join(dbPrivs)); } List<String> tblPrivs = Lists.newArrayList(); for (PrivEntry entry : tablePrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } TablePrivEntry tEntry = (TablePrivEntry) entry; tblPrivs.add(tEntry.getOrigDb() + "." 
+ tEntry.getOrigTbl() + ": " + tEntry.getPrivSet().toString() + " (" + entry.isSetByDomainResolver() + ")"); } if (tblPrivs.isEmpty()) { userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(Joiner.on("; ").join(tblPrivs)); } List<String> resourcePrivs = Lists.newArrayList(); for (PrivEntry entry : resourcePrivTable.entries) { if (!entry.match(userIdent, true /* exact match */)) { continue; } ResourcePrivEntry rEntry = (ResourcePrivEntry) entry; resourcePrivs.add(rEntry.getOrigResource() + ": " + rEntry.getPrivSet().toString() + " (" + entry.isSetByDomainResolver() + ")"); } if (resourcePrivs.isEmpty()) { userAuthInfo.add(FeConstants.null_string); } else { userAuthInfo.add(Joiner.on("; ").join(resourcePrivs)); } userAuthInfos.add(userAuthInfo); } private Set<UserIdentity> getAllUserIdents(boolean includeEntrySetByResolver) { Set<UserIdentity> userIdents = Sets.newHashSet(); for (PrivEntry entry : userPrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } for (PrivEntry entry : dbPrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } for (PrivEntry entry : tablePrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } for (PrivEntry entry : resourcePrivTable.entries) { if (!includeEntrySetByResolver && entry.isSetByDomainResolver()) { continue; } userIdents.add(entry.getUserIdent()); } return userIdents; } public List<List<String>> getUserProperties(String qualifiedUser) { readLock(); try { return propertyMgr.fetchUserProperty(qualifiedUser); } catch (AnalysisException e) { return Lists.newArrayList(); } finally { readUnlock(); } } public void dropUserOfCluster(String clusterName, boolean isReplay) { writeLock(); try { Set<UserIdentity> allUserIdents = getAllUserIdents(true); for (UserIdentity userIdent 
: allUserIdents) { if (userIdent.getQualifiedUser().startsWith(clusterName)) { dropUserInternal(userIdent, isReplay); } } } finally { writeUnlock(); } } public Pair<String, DppConfig> getLoadClusterInfo(String qualifiedUser, String cluster) throws DdlException { readLock(); try { return propertyMgr.getLoadClusterInfo(qualifiedUser, cluster); } finally { readUnlock(); } } public boolean checkCanEnterCluster(ConnectContext ctx, String clusterName) { readLock(); try { if (checkGlobalPriv(ctx, PrivPredicate.ALL)) { return true; } if (dbPrivTable.hasClusterPriv(ctx, clusterName)) { return true; } if (tablePrivTable.hasClusterPriv(ctx, clusterName)) { return true; } return false; } finally { readUnlock(); } } private void initUser() { try { UserIdentity rootUser = new UserIdentity(ROOT_USER, "%"); rootUser.setIsAnalyzed(); createUserInternal(rootUser, PaloRole.OPERATOR_ROLE, new byte[0], true /* is replay */); UserIdentity adminUser = new UserIdentity(ADMIN_USER, "%"); adminUser.setIsAnalyzed(); createUserInternal(adminUser, PaloRole.ADMIN_ROLE, new byte[0], true /* is replay */); } catch (DdlException e) { LOG.error("should not happened", e); } } public TFetchResourceResult toResourceThrift() { readLock(); try { return propertyMgr.toResourceThrift(); } finally { readUnlock(); } } public List<List<String>> getRoleInfo() { readLock(); try { List<List<String>> results = Lists.newArrayList(); roleManager.getRoleInfo(results); return results; } finally { readUnlock(); } } public void getSchemaPrivStatus(List<TPrivilegeStatus> dbPrivResult, UserIdentity currentUser) { readLock(); try { for (PrivEntry entry : dbPrivTable.getEntries()) { DbPrivEntry dbPrivEntry = (DbPrivEntry) entry; String origDb = dbPrivEntry.getOrigDb(); String dbName = ClusterNamespace.getNameFromFullName(dbPrivEntry.getOrigDb()); if (dbName.equals("information_schema" /* Don't show privileges in information_schema */) || !checkDbPriv(currentUser, origDb, PrivPredicate.SHOW)) { continue; } String grantee = 
new String("\'").concat(ClusterNamespace.getNameFromFullName(dbPrivEntry.getOrigUser())) .concat("\'@\'").concat(dbPrivEntry.getOrigHost()).concat("\'"); String isGrantable = dbPrivEntry.getPrivSet().get(2) ? "YES" : "NO"; for (PaloPrivilege paloPriv : dbPrivEntry.getPrivSet().toPrivilegeList()) { if (!PaloPrivilege.privInPaloToMysql.containsKey(paloPriv)) { continue; } TPrivilegeStatus status = new TPrivilegeStatus(); status.setPrivilegeType(PaloPrivilege.privInPaloToMysql.get(paloPriv)); status.setGrantee(grantee); status.setSchema(dbName); status.setIsGrantable(isGrantable); dbPrivResult.add(status); } } } finally { readUnlock(); } } public void getGlobalPrivStatus(List<TPrivilegeStatus> userPrivResult, UserIdentity currentUser) { readLock(); try { if (!checkGlobalPriv(currentUser, PrivPredicate.SHOW)) { return; } for (PrivEntry userPrivEntry : userPrivTable.getEntries()) { String grantee = new String("\'").concat(ClusterNamespace.getNameFromFullName(userPrivEntry.getOrigUser())) .concat("\'@\'").concat(userPrivEntry.getOrigHost()).concat("\'"); String isGrantable = userPrivEntry.getPrivSet().get(2) ? 
"YES" : "NO"; for (PaloPrivilege paloPriv : userPrivEntry.getPrivSet().toPrivilegeList()) { if (paloPriv == PaloPrivilege.ADMIN_PRIV) { for (String priv : PaloPrivilege.privInPaloToMysql.values()) { TPrivilegeStatus status = new TPrivilegeStatus(); status.setPrivilegeType(priv); status.setGrantee(grantee); status.setIsGrantable("YES"); userPrivResult.add(status); } break; } if (!PaloPrivilege.privInPaloToMysql.containsKey(paloPriv)) { continue; } TPrivilegeStatus status = new TPrivilegeStatus(); status.setPrivilegeType(PaloPrivilege.privInPaloToMysql.get(paloPriv)); status.setGrantee(grantee); status.setIsGrantable(isGrantable); userPrivResult.add(status); } } } finally { readUnlock(); } } public static PaloAuth read(DataInput in) throws IOException { PaloAuth auth = new PaloAuth(); auth.readFields(in); return auth; } @Override public void write(DataOutput out) throws IOException { roleManager.write(out); userPrivTable.write(out); dbPrivTable.write(out); tablePrivTable.write(out); resourcePrivTable.write(out); propertyMgr.write(out); } public void readFields(DataInput in) throws IOException { roleManager = RoleManager.read(in); userPrivTable = (UserPrivTable) PrivTable.read(in); dbPrivTable = (DbPrivTable) PrivTable.read(in); tablePrivTable = (TablePrivTable) PrivTable.read(in); if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_87) { resourcePrivTable = (ResourcePrivTable) PrivTable.read(in); } propertyMgr = UserPropertyMgr.read(in); if (userPrivTable.isEmpty()) { initUser(); } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(userPrivTable).append("\n"); sb.append(dbPrivTable).append("\n"); sb.append(tablePrivTable).append("\n"); sb.append(resourcePrivTable).append("\n"); sb.append(roleManager).append("\n"); sb.append(propertyMgr).append("\n"); return sb.toString(); } }
should not add check here, the fourth child should be string type or varchar type, but maybe not string literal. signature already check the type, so remove this check please.
public AesDecryptV2 withChildren(List<Expression> children) { Preconditions.checkArgument(children.size() >= 2 && children.size() <= 4); if (children.size() == 2) { return new AesDecryptV2(children.get(0), children.get(1)); } else if (children().size() == 3) { return new AesDecryptV2(children.get(0), children.get(1), children.get(2)); } else { if (!(children.get(3) instanceof StringLiteral)) { throw new AnalysisException("the 4th parameter should be string literal: " + this.toSql()); } return new AesDecryptV2(children.get(0), children.get(1), children.get(2), (StringLiteral) children.get(3)); } }
}
public AesDecryptV2 withChildren(List<Expression> children) { Preconditions.checkArgument(children.size() >= 2 && children.size() <= 4); if (children.size() == 2) { return new AesDecryptV2(children.get(0), children.get(1)); } else if (children().size() == 3) { return new AesDecryptV2(children.get(0), children.get(1), children.get(2)); } else { return new AesDecryptV2(children.get(0), children.get(1), children.get(2), (StringLiteral) children.get(3)); } }
class AesDecryptV2 extends AesDecrypt { /** * AesDecryptV2 */ public AesDecryptV2(Expression arg0, Expression arg1) { super(arg0, arg1, getDefaultBlockEncryptionMode()); String blockEncryptionMode = String.valueOf(getDefaultBlockEncryptionMode()); if (!blockEncryptionMode.toUpperCase().equals("'AES_128_ECB'") && !blockEncryptionMode.toUpperCase().equals("'AES_192_ECB'") && !blockEncryptionMode.toUpperCase().equals("'AES_256_ECB'")) { throw new AnalysisException("Incorrect parameter count in the call to native function 'aes_decrypt'"); } } public AesDecryptV2(Expression arg0, Expression arg1, Expression arg2) { super(arg0, arg1, arg2); } public AesDecryptV2(Expression arg0, Expression arg1, Expression arg2, Expression arg3) { super(arg0, arg1, arg2, arg3); } /** * withChildren. */ @Override @Override public <R, C> R accept(ExpressionVisitor<R, C> visitor, C context) { return visitor.visitAesDecryptV2(this, context); } }
class AesDecryptV2 extends AesDecrypt { /** * AesDecryptV2 */ public AesDecryptV2(Expression arg0, Expression arg1) { super(arg0, arg1, getDefaultBlockEncryptionMode()); String blockEncryptionMode = String.valueOf(getDefaultBlockEncryptionMode()); if (!blockEncryptionMode.toUpperCase().equals("'AES_128_ECB'") && !blockEncryptionMode.toUpperCase().equals("'AES_192_ECB'") && !blockEncryptionMode.toUpperCase().equals("'AES_256_ECB'")) { throw new AnalysisException("Incorrect parameter count in the call to native function 'aes_decrypt'"); } } public AesDecryptV2(Expression arg0, Expression arg1, Expression arg2) { super(arg0, arg1, arg2); } public AesDecryptV2(Expression arg0, Expression arg1, Expression arg2, Expression arg3) { super(arg0, arg1, arg2, arg3); } /** * withChildren. */ @Override @Override public <R, C> R accept(ExpressionVisitor<R, C> visitor, C context) { return visitor.visitAesDecryptV2(this, context); } }
Will fix the same with "Adding documentation for the whole document"
private static String getDocumentationAttachment(List<String> attributes, int offset) { String offsetStr = String.join("", Collections.nCopies(offset, " ")); if (attributes == null || attributes.isEmpty()) { return String.format("%n%sdocumentation {%n%s\t%n%s}", offsetStr, offsetStr, offsetStr); } else { String joinedList = String.join(" \r\n\t", attributes); return String.format("%n%sdocumentation {%n%s\t%n\t%s%n%s}", offsetStr, offsetStr, joinedList, offsetStr); } }
} else {
private static String getDocumentationAttachment(List<String> attributes, int offset) { String offsetStr = String.join("", Collections.nCopies(offset, " ")); if (attributes == null || attributes.isEmpty()) { return String.format("%n%sdocumentation {%n%s\t%n%s}", offsetStr, offsetStr, offsetStr); } else { String joinedList = String.join(" \r\n\t", attributes); return String.format("%n%sdocumentation {%n%s\t%n\t%s%n%s}", offsetStr, offsetStr, joinedList, offsetStr); } }
class CommandArgument { private String argumentK; private String argumentV; CommandArgument(String argumentK, String argumentV) { this.argumentK = argumentK; this.argumentV = argumentV; } public String getArgumentK() { return argumentK; } public String getArgumentV() { return argumentV; } }
class CommandArgument { private String argumentK; private String argumentV; CommandArgument(String argumentK, String argumentV) { this.argumentK = argumentK; this.argumentV = argumentV; } public String getArgumentK() { return argumentK; } public String getArgumentV() { return argumentV; } }
Unrelated to this PR - I'm wondering if at some point we shouldn't use something like https://github.com/zalando/problem.
private void htmlResponse(RoutingContext event, String details, Throwable exception) { event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=utf-8"); final TemplateHtmlBuilder htmlBuilder = new TemplateHtmlBuilder("Internal Server Error", details, details); if (showStack && exception != null) { htmlBuilder.stack(exception); } writeResponse(event, htmlBuilder.toString()); }
event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=utf-8");
private void htmlResponse(RoutingContext event, String details, Throwable exception) { event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=utf-8"); final TemplateHtmlBuilder htmlBuilder = new TemplateHtmlBuilder("Internal Server Error", details, details); if (showStack && exception != null) { htmlBuilder.stack(exception); } writeResponse(event, htmlBuilder.toString()); }
class QuarkusErrorHandler implements Handler<RoutingContext> { private static final Logger log = getLogger(QuarkusErrorHandler.class); private static final Pattern ACCEPT_HEADER_SEPARATOR_PATTERN = Pattern.compile("\\s*,\\s*"); /** * we don't want to generate a new UUID each time as it is slowish. Instead we just generate one based one * and then use a counter. */ private static final String BASE_ID = UUID.randomUUID().toString() + "-"; private static final AtomicLong ERROR_COUNT = new AtomicLong(); private final boolean showStack; public QuarkusErrorHandler(boolean showStack) { this.showStack = showStack; } @Override public void handle(RoutingContext event) { try { if (event.failure() == null) { event.response().setStatusCode(event.statusCode()); event.response().end(); return; } if (event.failure() instanceof UnauthorizedException) { HttpAuthenticator authenticator = event.get(HttpAuthenticator.class.getName()); if (authenticator != null) { authenticator.sendChallenge(event).subscribe().with(new Consumer<Boolean>() { @Override public void accept(Boolean aBoolean) { event.response().end(); } }, new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { event.fail(throwable); } }); } else { event.response().setStatusCode(HttpResponseStatus.UNAUTHORIZED.code()).end(); } return; } if (event.failure() instanceof ForbiddenException) { event.response().setStatusCode(HttpResponseStatus.FORBIDDEN.code()).end(); return; } if (event.failure() instanceof AuthenticationFailedException) { event.response().setStatusCode(HttpResponseStatus.UNAUTHORIZED.code()).end(); return; } } catch (IllegalStateException e) { if (!event.response().ended()) { event.response().end(); } return; } if (!event.response().headWritten()) { event.response().setStatusCode(event.statusCode() > 0 ? 
event.statusCode() : 500); } String uuid = BASE_ID + ERROR_COUNT.incrementAndGet(); String details = ""; String stack = ""; Throwable exception = event.failure(); String responseContentType = null; try { responseContentType = ContentTypes.pickFirstSupportedAndAcceptedContentType(event.request()); } catch (RuntimeException e) { exception.addSuppressed(e); } if (showStack && exception != null) { details = generateHeaderMessage(exception, uuid); stack = generateStackTrace(exception); } else { details += "Error id " + uuid; } if (event.failure() instanceof IOException) { log.debugf(exception, "IOError processing HTTP request to %s failed, the client likely terminated the connection. Error id: %s", event.request().uri(), uuid); } else { log.errorf(exception, "HTTP Request to %s failed, error id: %s", event.request().uri(), uuid); } if (event.response().ended()) { return; } else if (event.response().headWritten()) { event.response().end(); return; } if (responseContentType == null) { responseContentType = ""; } switch (responseContentType) { case ContentTypes.TEXT_HTML: case ContentTypes.APPLICATION_XHTML: case ContentTypes.APPLICATION_XML: case ContentTypes.TEXT_XML: htmlResponse(event, details, exception); break; case ContentTypes.APPLICATION_JSON: case ContentTypes.TEXT_JSON: jsonResponse(event, responseContentType, details, stack); break; default: jsonResponse(event, ContentTypes.APPLICATION_JSON, details, stack); break; } } private void jsonResponse(RoutingContext event, String contentType, String details, String stack) { event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, contentType + "; charset=utf-8"); String escapedDetails = escapeJsonString(details); String escapedStack = escapeJsonString(stack); StringBuilder jsonPayload = new StringBuilder("{\"details\":\"") .append(escapedDetails) .append("\",\"stack\":\"") .append(escapedStack) .append("\"}"); writeResponse(event, jsonPayload.toString()); } private void writeResponse(RoutingContext event, String 
output) { if (!event.response().ended()) { event.response().end(output); } } private static String generateStackTrace(final Throwable exception) { StringWriter stringWriter = new StringWriter(); exception.printStackTrace(new PrintWriter(stringWriter)); return stringWriter.toString().trim(); } private static String generateHeaderMessage(final Throwable exception, String uuid) { return String.format("Error handling %s, %s: %s", uuid, exception.getClass().getName(), extractFirstLine(exception.getMessage())); } private static String extractFirstLine(final String message) { if (null == message) { return ""; } String[] lines = message.split("\\r?\\n"); return lines[0].trim(); } static String escapeJsonString(final String text) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < text.length(); i++) { char ch = text.charAt(i); switch (ch) { case '"': sb.append("\\\""); break; case '\\': sb.append("\\\\"); break; case '\b': sb.append("\\b"); break; case '\f': sb.append("\\f"); break; case '\n': sb.append("\\n"); break; case '\r': sb.append("\\r"); break; case '\t': sb.append("\\t"); break; default: sb.append(ch); } } return sb.toString(); } private static final class ContentTypes { private ContentTypes() { } private static final String APPLICATION_JSON = "application/json"; private static final String TEXT_JSON = "text/json"; private static final String TEXT_HTML = "text/html"; private static final String APPLICATION_XML = "application/xml"; private static final String TEXT_XML = "text/xml"; private static final String APPLICATION_XHTML = "application/xhtml+xml"; private static final Set<String> SUPPORTED_SET = Set.of(APPLICATION_JSON, TEXT_JSON, TEXT_HTML, APPLICATION_XML, TEXT_XML, APPLICATION_XHTML); static String pickFirstSupportedAndAcceptedContentType(HttpServerRequest request) { List<String> acceptHeaderValues = request.headers().getAll("Accept"); String result = null; for (String accept : acceptHeaderValues) { result = 
pickFirstSupportedAndAcceptedContentType(accept); if (result != null) { break; } } return result; } private static String pickFirstSupportedAndAcceptedContentType(String acceptHeaderValue) { for (String part : ACCEPT_HEADER_SEPARATOR_PATTERN.split(acceptHeaderValue)) { int firstSemicolonIndex = part.indexOf(";"); if (firstSemicolonIndex >= 0) { part = part.substring(0, firstSemicolonIndex); } if (SUPPORTED_SET.contains(part)) { return part; } } return null; } } }
class QuarkusErrorHandler implements Handler<RoutingContext> { private static final Logger log = getLogger(QuarkusErrorHandler.class); /** * we don't want to generate a new UUID each time as it is slowish. Instead we just generate one based one * and then use a counter. */ private static final String BASE_ID = UUID.randomUUID().toString() + "-"; private static final AtomicLong ERROR_COUNT = new AtomicLong(); private final boolean showStack; private final Optional<HttpConfiguration.PayloadHint> contentTypeDefault; public QuarkusErrorHandler(boolean showStack, Optional<HttpConfiguration.PayloadHint> contentTypeDefault) { this.showStack = showStack; this.contentTypeDefault = contentTypeDefault; } @Override public void handle(RoutingContext event) { try { if (event.failure() == null) { event.response().setStatusCode(event.statusCode()); event.response().end(); return; } if (event.failure() instanceof UnauthorizedException) { HttpAuthenticator authenticator = event.get(HttpAuthenticator.class.getName()); if (authenticator != null) { authenticator.sendChallenge(event).subscribe().with(new Consumer<Boolean>() { @Override public void accept(Boolean aBoolean) { event.response().end(); } }, new Consumer<Throwable>() { @Override public void accept(Throwable throwable) { event.fail(throwable); } }); } else { event.response().setStatusCode(HttpResponseStatus.UNAUTHORIZED.code()).end(); } return; } if (event.failure() instanceof ForbiddenException) { event.response().setStatusCode(HttpResponseStatus.FORBIDDEN.code()).end(); return; } if (event.failure() instanceof AuthenticationFailedException) { event.response().setStatusCode(HttpResponseStatus.UNAUTHORIZED.code()).end(); return; } } catch (IllegalStateException e) { if (!event.response().ended()) { event.response().end(); } return; } if (!event.response().headWritten()) { event.response().setStatusCode(event.statusCode() > 0 ? 
event.statusCode() : 500); } String uuid = BASE_ID + ERROR_COUNT.incrementAndGet(); String details; String stack = ""; Throwable exception = event.failure(); String responseContentType = null; try { responseContentType = ContentTypes.pickFirstSupportedAndAcceptedContentType(event); } catch (RuntimeException e) { exception.addSuppressed(e); } if (showStack && exception != null) { details = generateHeaderMessage(exception, uuid); stack = generateStackTrace(exception); } else { details = generateHeaderMessage(uuid); } if (event.failure() instanceof IOException) { log.debugf(exception, "IOError processing HTTP request to %s failed, the client likely terminated the connection. Error id: %s", event.request().uri(), uuid); } else { log.errorf(exception, "HTTP Request to %s failed, error id: %s", event.request().uri(), uuid); } if (event.response().ended()) { return; } else if (event.response().headWritten()) { event.response().end(); return; } if (responseContentType == null) { responseContentType = ""; } switch (responseContentType) { case ContentTypes.TEXT_HTML: case ContentTypes.APPLICATION_XHTML: case ContentTypes.APPLICATION_XML: case ContentTypes.TEXT_XML: htmlResponse(event, details, exception); break; case ContentTypes.APPLICATION_JSON: case ContentTypes.TEXT_JSON: jsonResponse(event, responseContentType, details, stack); break; default: switch (contentTypeDefault.orElse(HttpConfiguration.PayloadHint.JSON)) { case HTML: htmlResponse(event, details, exception); break; case JSON: default: jsonResponse(event, ContentTypes.APPLICATION_JSON, details, stack); break; } break; } } private void jsonResponse(RoutingContext event, String contentType, String details, String stack) { event.response().headers().set(HttpHeaderNames.CONTENT_TYPE, contentType + "; charset=utf-8"); String escapedDetails = escapeJsonString(details); String escapedStack = escapeJsonString(stack); StringBuilder jsonPayload = new StringBuilder("{\"details\":\"") .append(escapedDetails) 
.append("\",\"stack\":\"") .append(escapedStack) .append("\"}"); writeResponse(event, jsonPayload.toString()); } private void writeResponse(RoutingContext event, String output) { if (!event.response().ended()) { event.response().end(output); } } private static String generateStackTrace(final Throwable exception) { StringWriter stringWriter = new StringWriter(); exception.printStackTrace(new PrintWriter(stringWriter)); return stringWriter.toString().trim(); } private static String generateHeaderMessage(final Throwable exception, String uuid) { return String.format("Error id %s, %s: %s", uuid, exception.getClass().getName(), extractFirstLine(exception.getMessage())); } private static String generateHeaderMessage(String uuid) { return String.format("Error id %s", uuid); } private static String extractFirstLine(final String message) { if (null == message) { return ""; } String[] lines = message.split("\\r?\\n"); return lines[0].trim(); } static String escapeJsonString(final String text) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < text.length(); i++) { char ch = text.charAt(i); switch (ch) { case '"': sb.append("\\\""); break; case '\\': sb.append("\\\\"); break; case '\b': sb.append("\\b"); break; case '\f': sb.append("\\f"); break; case '\n': sb.append("\\n"); break; case '\r': sb.append("\\r"); break; case '\t': sb.append("\\t"); break; default: sb.append(ch); } } return sb.toString(); } private static final class ContentTypes { private ContentTypes() { } private static final String APPLICATION_JSON = "application/json"; private static final String TEXT_JSON = "text/json"; private static final String TEXT_HTML = "text/html"; private static final String APPLICATION_XHTML = "application/xhtml+xml"; private static final String APPLICATION_XML = "application/xml"; private static final String TEXT_XML = "text/xml"; private static final Collection<MIMEHeader> SUPPORTED = Arrays.asList( new ParsableMIMEValue(APPLICATION_JSON).forceParse(), new 
ParsableMIMEValue(TEXT_JSON).forceParse(), new ParsableMIMEValue(TEXT_HTML).forceParse(), new ParsableMIMEValue(APPLICATION_XHTML).forceParse(), new ParsableMIMEValue(APPLICATION_XML).forceParse(), new ParsableMIMEValue(TEXT_XML).forceParse()); static String pickFirstSupportedAndAcceptedContentType(RoutingContext context) { List<MIMEHeader> acceptableTypes = context.parsedHeaders().accept(); MIMEHeader result = context.parsedHeaders().findBestUserAcceptedIn(acceptableTypes, SUPPORTED); return result == null ? null : result.value(); } } }
Why do we need Stack for this? Would a simple String variable work? It make sense for classes with inner classes to be able to pop() and come back to parent class... But, can we find a new Method_DEF inside a Method_def?
public void leaveToken(DetailAST token) { switch (token.getType()) { case TokenTypes.CLASS_DEF: if (!classNameStack.isEmpty()) { classNameStack.pop(); } break; case TokenTypes.METHOD_DEF: if (!methodDefStack.isEmpty()) { methodDefStack.pop(); } break; default: break; } }
methodDefStack.pop();
public void leaveToken(DetailAST token) { if (token.getType() == TokenTypes.CLASS_DEF && !classNameStack.isEmpty()) { classNameStack.pop(); } }
class name when leave the same token private Deque<String> classNameStack = new ArrayDeque<>(); private Deque<DetailAST> methodDefStack = new ArrayDeque<>(); @Override public int[] getDefaultTokens() { return getRequiredTokens(); }
class name when leave the same token private Deque<String> classNameStack = new ArrayDeque<>(); private DetailAST methodDefToken = null; @Override public int[] getDefaultTokens() { return getRequiredTokens(); }
should this be using the CoreUtils method added above?
private HttpPipeline createPipeline() { if (pipeline != null) { return pipeline; } final Configuration buildConfiguration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration; final List<HttpPipelinePolicy> httpPolicies = new ArrayList<>(); final String applicationId = CoreUtils.getApplicationId(clientOptions, httpLogOptions); httpPolicies.add(new UserAgentPolicy(applicationId, CLIENT_NAME, CLIENT_VERSION, buildConfiguration)); httpPolicies.add(new ServiceBusTokenCredentialHttpPolicy(tokenCredential)); httpPolicies.add(new AddHeadersFromContextPolicy()); httpPolicies.addAll(perCallPolicies); HttpPolicyProviders.addBeforeRetryPolicies(httpPolicies); httpPolicies.add(retryPolicy == null ? new RetryPolicy() : retryPolicy); httpPolicies.addAll(perRetryPolicies); if (clientOptions != null) { List<HttpHeader> httpHeaderList = new ArrayList<>(); clientOptions.getHeaders().forEach(h -> httpHeaderList.add(new HttpHeader(h.getName(), h.getValue()))); if (!httpHeaderList.isEmpty()) { httpPolicies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList))); } } httpPolicies.add(new HttpLoggingPolicy(httpLogOptions)); HttpPolicyProviders.addAfterRetryPolicies(httpPolicies); return new HttpPipelineBuilder() .policies(httpPolicies.toArray(new HttpPipelinePolicy[0])) .httpClient(httpClient) .clientOptions(clientOptions) .build(); }
}
private HttpPipeline createPipeline() { if (pipeline != null) { return pipeline; } final Configuration buildConfiguration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration; final List<HttpPipelinePolicy> httpPolicies = new ArrayList<>(); final String applicationId = CoreUtils.getApplicationId(clientOptions, httpLogOptions); httpPolicies.add(new UserAgentPolicy(applicationId, CLIENT_NAME, CLIENT_VERSION, buildConfiguration)); httpPolicies.add(new ServiceBusTokenCredentialHttpPolicy(tokenCredential)); httpPolicies.add(new AddHeadersFromContextPolicy()); httpPolicies.addAll(perCallPolicies); HttpPolicyProviders.addBeforeRetryPolicies(httpPolicies); httpPolicies.add(retryPolicy == null ? new RetryPolicy() : retryPolicy); httpPolicies.addAll(perRetryPolicies); if (clientOptions != null) { List<HttpHeader> httpHeaderList = new ArrayList<>(); clientOptions.getHeaders().forEach(h -> httpHeaderList.add(new HttpHeader(h.getName(), h.getValue()))); if (!httpHeaderList.isEmpty()) { httpPolicies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList))); } } httpPolicies.add(new HttpLoggingPolicy(httpLogOptions)); HttpPolicyProviders.addAfterRetryPolicies(httpPolicies); return new HttpPipelineBuilder() .policies(httpPolicies.toArray(new HttpPipelinePolicy[0])) .httpClient(httpClient) .clientOptions(clientOptions) .build(); }
class ServiceBusAdministrationClientBuilder { private static final String CLIENT_NAME; private static final String CLIENT_VERSION; static { Map<String, String> properties = CoreUtils.getProperties("azure-messaging-servicebus.properties"); CLIENT_NAME = properties.getOrDefault("name", "UnknownName"); CLIENT_VERSION = properties.getOrDefault("version", "UnknownVersion"); } private final ClientLogger logger = new ClientLogger(ServiceBusAdministrationClientBuilder.class); private final ServiceBusManagementSerializer serializer = new ServiceBusManagementSerializer(); private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>(); private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>(); private Configuration configuration; private String endpoint; private HttpClient httpClient; private HttpLogOptions httpLogOptions = new HttpLogOptions(); private HttpPipeline pipeline; private HttpPipelinePolicy retryPolicy; private TokenCredential tokenCredential; private ServiceBusServiceVersion serviceVersion; private ClientOptions clientOptions; /** * Constructs a builder with the default parameters. */ public ServiceBusAdministrationClientBuilder() { } /** * Creates a {@link ServiceBusAdministrationAsyncClient} based on options set in the builder. Every time {@code * buildAsyncClient} is invoked, a new instance of the client is created. * * <p>If {@link * {@link * other builder settings are ignored.</p> * * @return A {@link ServiceBusAdministrationAsyncClient} with the options set in the builder. * @throws NullPointerException if {@code endpoint} has not been set. This is automatically set when {@link * * {@link * @throws IllegalStateException If applicationId if set in both {@code httpLogOptions} and {@code clientOptions} * and not same. 
*/ public ServiceBusAdministrationAsyncClient buildAsyncClient() { if (endpoint == null) { throw logger.logExceptionAsError(new NullPointerException("'endpoint' cannot be null.")); } final ServiceBusServiceVersion apiVersion = serviceVersion == null ? ServiceBusServiceVersion.getLatest() : serviceVersion; final HttpPipeline httpPipeline = createPipeline(); final ServiceBusManagementClientImpl client = new ServiceBusManagementClientImplBuilder() .pipeline(httpPipeline) .serializerAdapter(serializer) .endpoint(endpoint) .apiVersion(apiVersion.getVersion()) .buildClient(); return new ServiceBusAdministrationAsyncClient(client, serializer); } /** * Creates a {@link ServiceBusAdministrationClient} based on options set in the builder. Every time {@code * buildClient} is invoked, a new instance of the client is created. * * <p>If {@link * {@link * other builder settings are ignored.</p> * * @return A {@link ServiceBusAdministrationClient} with the options set in the builder. * @throws NullPointerException if {@code endpoint} has not been set. This is automatically set when {@link * * {@link * @throws IllegalStateException If applicationId if set in both {@code httpLogOptions} and {@code clientOptions} * and not same. */ public ServiceBusAdministrationClient buildClient() { return new ServiceBusAdministrationClient(buildAsyncClient()); } /** * Adds a policy to the set of existing policies that are executed after required policies. * * @param policy The retry policy for service requests. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * @throws NullPointerException If {@code policy} is {@code null}. */ public ServiceBusAdministrationClientBuilder addPolicy(HttpPipelinePolicy policy) { Objects.requireNonNull(policy); if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) { perCallPolicies.add(policy); } else { perRetryPolicies.add(policy); } return this; } /** * Sets the service endpoint for the Service Bus namespace. 
* * @param endpoint The URL of the Service Bus namespace. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * @throws NullPointerException if {@code endpoint} is null. * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL. */ public ServiceBusAdministrationClientBuilder endpoint(String endpoint) { final URL url; try { url = new URL(Objects.requireNonNull(endpoint, "'endpoint' cannot be null.")); } catch (MalformedURLException ex) { throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL")); } this.endpoint = url.getHost(); return this; } /** * Sets the configuration store that is used during construction of the service client. * * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store used to * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the connection string for a Service Bus namespace or a specific Service Bus resource. * * @param connectionString Connection string for a Service Bus namespace or a specific Service Bus resource. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * @throws NullPointerException If {@code connectionString} is {@code null}. * @throws IllegalArgumentException If {@code connectionString} is an entity specific connection string, and not * a {@code connectionString} for the Service Bus namespace. 
*/ public ServiceBusAdministrationClientBuilder connectionString(String connectionString) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); final TokenCredential tokenCredential; try { tokenCredential = new ServiceBusSharedKeyCredential(properties.getSharedAccessKeyName(), properties.getSharedAccessKey(), ServiceBusConstants.TOKEN_VALIDITY); } catch (Exception e) { throw logger.logExceptionAsError( new AzureException("Could not create the ServiceBusSharedKeyCredential.", e)); } this.endpoint = properties.getEndpoint().getHost(); if (properties.getEntityPath() != null && !properties.getEntityPath().isEmpty()) { throw logger.logExceptionAsError(new IllegalArgumentException( "'connectionString' cannot contain an EntityPath. It should be a namespace connection string.")); } return credential(properties.getEndpoint().getHost(), tokenCredential); } /** * Sets the credential used to authenticate HTTP requests to the Service Bus namespace. * * @param fullyQualifiedNamespace for the Service Bus. * @param credential {@link TokenCredential} to be used for authentication. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder credential(String fullyQualifiedNamespace, TokenCredential credential) { this.endpoint = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.tokenCredential = Objects.requireNonNull(credential, "'credential' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return this; } /** * Sets the HTTP client to use for sending and receiving requests to and from the service. * * @param client The HTTP client to use for requests. 
* * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder httpClient(HttpClient client) { if (this.httpClient != null && client == null) { logger.info("HttpClient is being set to 'null' when it was previously configured."); } this.httpClient = client; return this; } /** * Sets the logging configuration for HTTP requests and responses. * * <p>If logLevel is not provided, default value of {@link HttpLogDetailLevel * * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder httpLogOptions(HttpLogOptions logOptions) { httpLogOptions = logOptions; return this; } /** * Sets the {@link ClientOptions} which enables various options to be set on the client. For example setting * {@code applicationId} using {@link ClientOptions * for telemetry/monitoring purpose. * <p> * * @param clientOptions to be set on the client. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * * @see <a href="https: * policy</a> */ public ServiceBusAdministrationClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the HTTP pipeline to use for the service client. * * If {@code pipeline} is set, all other settings are ignored, aside from {@link * ServiceBusAdministrationClientBuilder * or {@link ServiceBusAdministrationAsyncClient}. * * @param pipeline The HTTP pipeline to use for sending service requests and receiving responses. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. 
*/ public ServiceBusAdministrationClientBuilder pipeline(HttpPipeline pipeline) { if (this.pipeline != null && pipeline == null) { logger.info("HttpPipeline is being set to 'null' when it was previously configured."); } this.pipeline = pipeline; return this; } /** * Sets the {@link HttpPipelinePolicy} that is used when each request is sent. * * The default retry policy will be used if not provided {@link * to build {@link ServiceBusAdministrationClient} or {@link ServiceBusAdministrationAsyncClient}. * * @param retryPolicy The user's retry policy applied to each request. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link ServiceBusServiceVersion} that is used. By default {@link ServiceBusServiceVersion * is used when none is specified. * * @param serviceVersion Service version to use. * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder serviceVersion(ServiceBusServiceVersion serviceVersion) { this.serviceVersion = serviceVersion; return this; } /** * Builds a new HTTP pipeline if none is set, or returns a user-provided one. * * @return A new HTTP pipeline or the user-defined one from {@link * @throws IllegalStateException if applicationId is not same in httpLogOptions and clientOptions. */ }
class ServiceBusAdministrationClientBuilder { private static final String CLIENT_NAME; private static final String CLIENT_VERSION; static { Map<String, String> properties = CoreUtils.getProperties("azure-messaging-servicebus.properties"); CLIENT_NAME = properties.getOrDefault("name", "UnknownName"); CLIENT_VERSION = properties.getOrDefault("version", "UnknownVersion"); } private final ClientLogger logger = new ClientLogger(ServiceBusAdministrationClientBuilder.class); private final ServiceBusManagementSerializer serializer = new ServiceBusManagementSerializer(); private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>(); private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>(); private Configuration configuration; private String endpoint; private HttpClient httpClient; private HttpLogOptions httpLogOptions = new HttpLogOptions(); private HttpPipeline pipeline; private HttpPipelinePolicy retryPolicy; private TokenCredential tokenCredential; private ServiceBusServiceVersion serviceVersion; private ClientOptions clientOptions; /** * Constructs a builder with the default parameters. */ public ServiceBusAdministrationClientBuilder() { } /** * Creates a {@link ServiceBusAdministrationAsyncClient} based on options set in the builder. Every time {@code * buildAsyncClient} is invoked, a new instance of the client is created. * * <p>If {@link * {@link * other builder settings are ignored.</p> * * @return A {@link ServiceBusAdministrationAsyncClient} with the options set in the builder. * @throws NullPointerException if {@code endpoint} has not been set. This is automatically set when {@link * * {@link * @throws IllegalStateException If applicationId if set in both {@code httpLogOptions} and {@code clientOptions} * and not same. 
*/ public ServiceBusAdministrationAsyncClient buildAsyncClient() { if (endpoint == null) { throw logger.logExceptionAsError(new NullPointerException("'endpoint' cannot be null.")); } final ServiceBusServiceVersion apiVersion = serviceVersion == null ? ServiceBusServiceVersion.getLatest() : serviceVersion; final HttpPipeline httpPipeline = createPipeline(); final ServiceBusManagementClientImpl client = new ServiceBusManagementClientImplBuilder() .pipeline(httpPipeline) .serializerAdapter(serializer) .endpoint(endpoint) .apiVersion(apiVersion.getVersion()) .buildClient(); return new ServiceBusAdministrationAsyncClient(client, serializer); } /** * Creates a {@link ServiceBusAdministrationClient} based on options set in the builder. Every time {@code * buildClient} is invoked, a new instance of the client is created. * * <p>If {@link * {@link * other builder settings are ignored.</p> * * @return A {@link ServiceBusAdministrationClient} with the options set in the builder. * @throws NullPointerException if {@code endpoint} has not been set. This is automatically set when {@link * * {@link * @throws IllegalStateException If applicationId if set in both {@code httpLogOptions} and {@code clientOptions} * and not same. */ public ServiceBusAdministrationClient buildClient() { return new ServiceBusAdministrationClient(buildAsyncClient()); } /** * Adds a policy to the set of existing policies that are executed after required policies. * * @param policy The retry policy for service requests. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * @throws NullPointerException If {@code policy} is {@code null}. */ public ServiceBusAdministrationClientBuilder addPolicy(HttpPipelinePolicy policy) { Objects.requireNonNull(policy); if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) { perCallPolicies.add(policy); } else { perRetryPolicies.add(policy); } return this; } /** * Sets the service endpoint for the Service Bus namespace. 
* * @param endpoint The URL of the Service Bus namespace. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * @throws NullPointerException if {@code endpoint} is null. * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL. */ public ServiceBusAdministrationClientBuilder endpoint(String endpoint) { final URL url; try { url = new URL(Objects.requireNonNull(endpoint, "'endpoint' cannot be null.")); } catch (MalformedURLException ex) { throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL")); } this.endpoint = url.getHost(); return this; } /** * Sets the configuration store that is used during construction of the service client. * * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store used to * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the connection string for a Service Bus namespace or a specific Service Bus resource. * * @param connectionString Connection string for a Service Bus namespace or a specific Service Bus resource. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * @throws NullPointerException If {@code connectionString} is {@code null}. * @throws IllegalArgumentException If {@code connectionString} is an entity specific connection string, and not * a {@code connectionString} for the Service Bus namespace. 
*/ public ServiceBusAdministrationClientBuilder connectionString(String connectionString) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); final TokenCredential tokenCredential; try { tokenCredential = new ServiceBusSharedKeyCredential(properties.getSharedAccessKeyName(), properties.getSharedAccessKey(), ServiceBusConstants.TOKEN_VALIDITY); } catch (Exception e) { throw logger.logExceptionAsError( new AzureException("Could not create the ServiceBusSharedKeyCredential.", e)); } this.endpoint = properties.getEndpoint().getHost(); if (properties.getEntityPath() != null && !properties.getEntityPath().isEmpty()) { throw logger.logExceptionAsError(new IllegalArgumentException( "'connectionString' cannot contain an EntityPath. It should be a namespace connection string.")); } return credential(properties.getEndpoint().getHost(), tokenCredential); } /** * Sets the credential used to authenticate HTTP requests to the Service Bus namespace. * * @param fullyQualifiedNamespace for the Service Bus. * @param credential {@link TokenCredential} to be used for authentication. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder credential(String fullyQualifiedNamespace, TokenCredential credential) { this.endpoint = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.tokenCredential = Objects.requireNonNull(credential, "'credential' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return this; } /** * Sets the HTTP client to use for sending and receiving requests to and from the service. * * @param client The HTTP client to use for requests. 
* * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder httpClient(HttpClient client) { if (this.httpClient != null && client == null) { logger.info("HttpClient is being set to 'null' when it was previously configured."); } this.httpClient = client; return this; } /** * Sets the logging configuration for HTTP requests and responses. * * <p>If logLevel is not provided, default value of {@link HttpLogDetailLevel * * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder httpLogOptions(HttpLogOptions logOptions) { httpLogOptions = logOptions; return this; } /** * Sets the {@link ClientOptions} which enables various options to be set on the client. For example setting * {@code applicationId} using {@link ClientOptions * for telemetry/monitoring purpose. * <p> * * @param clientOptions to be set on the client. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. * * @see <a href="https: * policy</a> */ public ServiceBusAdministrationClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the HTTP pipeline to use for the service client. * * If {@code pipeline} is set, all other settings are ignored, aside from {@link * ServiceBusAdministrationClientBuilder * or {@link ServiceBusAdministrationAsyncClient}. * * @param pipeline The HTTP pipeline to use for sending service requests and receiving responses. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. 
*/ public ServiceBusAdministrationClientBuilder pipeline(HttpPipeline pipeline) { if (this.pipeline != null && pipeline == null) { logger.info("HttpPipeline is being set to 'null' when it was previously configured."); } this.pipeline = pipeline; return this; } /** * Sets the {@link HttpPipelinePolicy} that is used when each request is sent. * * The default retry policy will be used if not provided {@link * to build {@link ServiceBusAdministrationClient} or {@link ServiceBusAdministrationAsyncClient}. * * @param retryPolicy The user's retry policy applied to each request. * * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link ServiceBusServiceVersion} that is used. By default {@link ServiceBusServiceVersion * is used when none is specified. * * @param serviceVersion Service version to use. * @return The updated {@link ServiceBusAdministrationClientBuilder} object. */ public ServiceBusAdministrationClientBuilder serviceVersion(ServiceBusServiceVersion serviceVersion) { this.serviceVersion = serviceVersion; return this; } /** * Builds a new HTTP pipeline if none is set, or returns a user-provided one. * * @return A new HTTP pipeline or the user-defined one from {@link * @throws IllegalStateException if applicationId is not same in httpLogOptions and clientOptions. */ }
More fuel for the "node states should be immutable" fire... 🔥
NodeState getNodeState(int index) { NodeState ns = nodeStates.get(index); if (ns != null) return ns; return (index >= getMaxIndex() || ! upNodes.get(index)) ? new NodeState(type, State.DOWN) : new NodeState(type, State.UP); }
: new NodeState(type, State.UP);
NodeState getNodeState(int index) { NodeState ns = nodeStates.get(index); if (ns != null) return ns; return (index >= getLogicalNodeCount() || ! upNodes.get(index)) ? new NodeState(type, State.DOWN) : new NodeState(type, State.UP); }
class Nodes { private int maxIndex; private final NodeType type; private final BitSet upNodes; private final Map<Integer, NodeState> nodeStates = new HashMap<>(); Nodes(NodeType type) { this.type = type; upNodes = new BitSet(); } Nodes(Nodes b) { maxIndex = b.maxIndex; type = b.type; upNodes = (BitSet) b.upNodes.clone(); b.nodeStates.forEach((key, value) -> nodeStates.put(key, value.clone())); } void updateMaxIndex(int index) { if (index > maxIndex) { upNodes.set(maxIndex, index); maxIndex = index; } } int getMaxIndex() { return maxIndex; } private void validateInput(Node node, NodeState ns) { ns.verifyValidInSystemState(node.getType()); if (node.getType() != type) { throw new IllegalArgumentException("NodeType '" + node.getType() + "' differs from '" + type + "'"); } } void setNodeState(Node node, NodeState ns) { validateInput(node, ns); int index = node.getIndex(); if (index >= maxIndex) { maxIndex = index + 1; } setNodeStateInternal(index, ns); } void addNodeState(Node node, NodeState ns) { validateInput(node, ns); int index = node.getIndex(); updateMaxIndex(index + 1); setNodeStateInternal(index, ns); } private static boolean equalsWithDescription(NodeState a, NodeState b) { return a.equals(b) && ((a.getState() != State.DOWN) || a.getDescription().equals(b.getDescription())); } private void setNodeStateInternal(int index, NodeState ns) { nodeStates.remove(index); if (ns.getState() == State.DOWN) { upNodes.clear(index); if ( ! equalsWithDescription(defaultDown(), ns)) { nodeStates.put(index, ns); } } else { upNodes.set(index); if ( ! equalsWithDescription(defaultUp(), ns)) { nodeStates.put(index, ns); } } } boolean similarToImpl(Nodes other, final NodeStateCmp nodeStateCmp) { if (maxIndex != other.maxIndex) return false; if (type != other.type) return false; if ( ! 
upNodes.equals(other.upNodes)) return false; for (Integer node : unionNodeSetWith(other.nodeStates.keySet())) { final NodeState lhs = nodeStates.get(node); final NodeState rhs = other.nodeStates.get(node); if (!nodeStateCmp.similar(type, lhs, rhs)) { return false; } } return true; } private Set<Integer> unionNodeSetWith(final Set<Integer> otherNodes) { final Set<Integer> unionNodeSet = new HashSet<>(nodeStates.keySet()); unionNodeSet.addAll(otherNodes); return unionNodeSet; } @Override public String toString() { return toString(false); } String toString(boolean verbose) { StringBuilder sb = new StringBuilder(); int nodeCount = verbose ? getMaxIndex() : upNodes.length(); if ( nodeCount > 0 ) { sb.append(type == NodeType.DISTRIBUTOR ? " distributor:" : " storage:").append(nodeCount); for (int i = 0; i < nodeCount; i++) { String nodeState = getNodeState(i).serialize(i, verbose); if (!nodeState.isEmpty()) { sb.append(' ').append(nodeState); } } } return sb.toString(); } @Override public boolean equals(Object obj) { if (! (obj instanceof Nodes)) return false; Nodes b = (Nodes) obj; if (maxIndex != b.maxIndex) return false; if (type != b.type) return false; if (!upNodes.equals(b.upNodes)) return false; if (!nodeStates.equals(b.nodeStates)) return false; return true; } @Override public int hashCode() { return Objects.hash(maxIndex, type, nodeStates, upNodes); } private NodeState defaultDown() { return type == NodeType.STORAGE ? DEFAULT_STORAGE_DOWN_NODE_STATE : DEFAULT_DISTRIBUTOR_DOWN_NODE_STATE; } private NodeState defaultUp() { return defaultUpNodeState(type); } }
class Nodes { private int logicalNodeCount; private final NodeType type; private final BitSet upNodes; private final Map<Integer, NodeState> nodeStates = new HashMap<>(); Nodes(NodeType type) { this.type = type; upNodes = new BitSet(); } Nodes(Nodes b) { logicalNodeCount = b.logicalNodeCount; type = b.type; upNodes = (BitSet) b.upNodes.clone(); b.nodeStates.forEach((key, value) -> nodeStates.put(key, value.clone())); } void updateMaxIndex(int index) { if (index > logicalNodeCount) { upNodes.set(logicalNodeCount, index); logicalNodeCount = index; } } int getLogicalNodeCount() { return logicalNodeCount; } private void validateInput(Node node, NodeState ns) { ns.verifyValidInSystemState(node.getType()); if (node.getType() != type) { throw new IllegalArgumentException("NodeType '" + node.getType() + "' differs from '" + type + "'"); } } void setNodeState(Node node, NodeState ns) { validateInput(node, ns); int index = node.getIndex(); if (index >= logicalNodeCount) { logicalNodeCount = index + 1; } setNodeStateInternal(index, ns); } void addNodeState(Node node, NodeState ns) { validateInput(node, ns); int index = node.getIndex(); updateMaxIndex(index + 1); setNodeStateInternal(index, ns); } private static boolean equalsWithDescription(NodeState a, NodeState b) { return a.equals(b) && ((a.getState() != State.DOWN) || a.getDescription().equals(b.getDescription())); } private void setNodeStateInternal(int index, NodeState ns) { nodeStates.remove(index); if (ns.getState() == State.DOWN) { upNodes.clear(index); if ( ! equalsWithDescription(defaultDown(), ns)) { nodeStates.put(index, ns); } } else { upNodes.set(index); if ( ! equalsWithDescription(defaultUp(), ns)) { nodeStates.put(index, ns); } } } boolean similarToImpl(Nodes other, final NodeStateCmp nodeStateCmp) { if (logicalNodeCount != other.logicalNodeCount) return false; if (type != other.type) return false; if ( ! 
upNodes.equals(other.upNodes)) return false; for (Integer node : unionNodeSetWith(other.nodeStates.keySet())) { final NodeState lhs = nodeStates.get(node); final NodeState rhs = other.nodeStates.get(node); if (!nodeStateCmp.similar(type, lhs, rhs)) { return false; } } return true; } private Set<Integer> unionNodeSetWith(final Set<Integer> otherNodes) { final Set<Integer> unionNodeSet = new HashSet<>(nodeStates.keySet()); unionNodeSet.addAll(otherNodes); return unionNodeSet; } @Override public String toString() { return toString(false); } String toString(boolean verbose) { StringBuilder sb = new StringBuilder(); int nodeCount = verbose ? getLogicalNodeCount() : upNodes.length(); if ( nodeCount > 0 ) { sb.append(type == NodeType.DISTRIBUTOR ? " distributor:" : " storage:").append(nodeCount); for (int i = 0; i < nodeCount; i++) { String nodeState = getNodeState(i).serialize(i, verbose); if (!nodeState.isEmpty()) { sb.append(' ').append(nodeState); } } } return sb.toString(); } @Override public boolean equals(Object obj) { if (! (obj instanceof Nodes)) return false; Nodes b = (Nodes) obj; if (logicalNodeCount != b.logicalNodeCount) return false; if (type != b.type) return false; if (!upNodes.equals(b.upNodes)) return false; if (!nodeStates.equals(b.nodeStates)) return false; return true; } @Override public int hashCode() { return Objects.hash(logicalNodeCount, type, nodeStates, upNodes); } private NodeState defaultDown() { return type == NodeType.STORAGE ? DEFAULT_STORAGE_DOWN_NODE_STATE : DEFAULT_DISTRIBUTOR_DOWN_NODE_STATE; } private NodeState defaultUp() { return defaultUpNodeState(type); } }
I'm not sure why this change fixes the database-loss issue; could you please add a comment to explain it?
public long loadCluster(DataInputStream dis, long checksum) throws IOException, DdlException { if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_30) { int clusterCount = dis.readInt(); checksum ^= clusterCount; for (long i = 0; i < clusterCount; ++i) { final Cluster cluster = Cluster.read(dis); checksum ^= cluster.getId(); List<Long> latestBackendIds = systemInfo.getClusterBackendIds(cluster.getName()); if (latestBackendIds.size() != cluster.getBackendIdList().size()) { LOG.warn("Cluster:" + cluster.getName() + ", backends in Cluster is " + cluster.getBackendIdList().size() + ", backends in SystemInfoService is " + cluster.getBackendIdList().size()); } cluster.setBackendIdList(latestBackendIds); String dbName = ClusterNamespace.getFullName(cluster.getName(), InfoSchemaDb.DATABASE_NAME); if (!fullNameToDb.containsKey(dbName)) { final InfoSchemaDb db = new InfoSchemaDb(cluster.getName()); db.setClusterName(cluster.getName()); String errMsg = "InfoSchemaDb id shouldn't larger than 10000, please restart your FE server"; Preconditions.checkState(db.getId() < NEXT_ID_INIT_VALUE, errMsg); idToDb.put(db.getId(), db); fullNameToDb.put(db.getFullName(), db); } cluster.addDb(dbName, fullNameToDb.get(dbName).getId()); idToCluster.put(cluster.getId(), cluster); nameToCluster.put(cluster.getName(), cluster); } } return checksum; }
cluster.addDb(dbName, fullNameToDb.get(dbName).getId());
public long loadCluster(DataInputStream dis, long checksum) throws IOException, DdlException { if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_30) { int clusterCount = dis.readInt(); checksum ^= clusterCount; for (long i = 0; i < clusterCount; ++i) { final Cluster cluster = Cluster.read(dis); checksum ^= cluster.getId(); List<Long> latestBackendIds = systemInfo.getClusterBackendIds(cluster.getName()); if (latestBackendIds.size() != cluster.getBackendIdList().size()) { LOG.warn("Cluster:" + cluster.getName() + ", backends in Cluster is " + cluster.getBackendIdList().size() + ", backends in SystemInfoService is " + cluster.getBackendIdList().size()); } cluster.setBackendIdList(latestBackendIds); String dbName = ClusterNamespace.getFullName(cluster.getName(), InfoSchemaDb.DATABASE_NAME); if (!fullNameToDb.containsKey(dbName)) { final InfoSchemaDb db = new InfoSchemaDb(cluster.getName()); db.setClusterName(cluster.getName()); String errMsg = "InfoSchemaDb id shouldn't larger than 10000, please restart your FE server"; Preconditions.checkState(db.getId() < NEXT_ID_INIT_VALUE, errMsg); idToDb.put(db.getId(), db); fullNameToDb.put(db.getFullName(), db); } cluster.addDb(dbName, fullNameToDb.get(dbName).getId()); idToCluster.put(cluster.getId(), cluster); nameToCluster.put(cluster.getName(), cluster); } } return checksum; }
class SingletonHolder { private static final Catalog INSTANCE = new Catalog(); }
class SingletonHolder { private static final Catalog INSTANCE = new Catalog(); }
I strongly dislike this test. (sorry!) What is the actual _behavior_ that should change based on the mock? I really don't think a mock is necessary or a good idea here.
public void testLaunchFnHarnessAndTeardownCleanly() throws Exception { Function<String, String> environmentVariableMock = mock(Function.class); PipelineOptions options = PipelineOptionsFactory.create(); when(environmentVariableMock.apply("HARNESS_ID")).thenReturn("id"); when(environmentVariableMock.apply("PIPELINE_OPTIONS")) .thenReturn(PipelineOptionsTranslation.toJson(options)); List<BeamFnApi.LogEntry> logEntries = new ArrayList<>(); List<BeamFnApi.InstructionResponse> instructionResponses = mock(List.class); BeamFnLoggingGrpc.BeamFnLoggingImplBase loggingService = new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<LogControl> responseObserver) { return TestStreams.withOnNext( (BeamFnApi.LogEntry.List entries) -> logEntries.addAll(entries.getLogEntriesList())) .withOnCompleted(responseObserver::onCompleted) .build(); } }; BeamFnControlGrpc.BeamFnControlImplBase controlService = new BeamFnControlGrpc.BeamFnControlImplBase() { @Override public StreamObserver<InstructionResponse> control( StreamObserver<InstructionRequest> responseObserver) { CountDownLatch waitForResponses = new CountDownLatch(1 /* number of responses expected */); options .as(GcsOptions.class) .getExecutorService() .submit( () -> { responseObserver.onNext(INSTRUCTION_REQUEST); Uninterruptibles.awaitUninterruptibly(waitForResponses); responseObserver.onCompleted(); }); return TestStreams.withOnNext( (InstructionResponse t) -> { instructionResponses.add(t); waitForResponses.countDown(); }) .withOnCompleted(waitForResponses::countDown) .build(); } }; Server loggingServer = ServerBuilder.forPort(0).addService(loggingService).build(); loggingServer.start(); try { Server controlServer = ServerBuilder.forPort(0).addService(controlService).build(); controlServer.start(); try { Endpoints.ApiServiceDescriptor loggingDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + loggingServer.getPort()) .build(); 
Endpoints.ApiServiceDescriptor controlDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + controlServer.getPort()) .build(); when(environmentVariableMock.apply("LOGGING_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(loggingDescriptor)); when(environmentVariableMock.apply("CONTROL_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(controlDescriptor)); FnHarness.main(environmentVariableMock); } finally { controlServer.shutdownNow(); } } finally { loggingServer.shutdownNow(); } InOrder inOrder = inOrder(onStartupMock, beforeProcessingMock, environmentVariableMock, instructionResponses); inOrder.verify(onStartupMock).run(); inOrder.verify(environmentVariableMock, atLeastOnce()).apply(any()); inOrder.verify(beforeProcessingMock).accept(any()); inOrder.verify(instructionResponses).add(INSTRUCTION_RESPONSE); }
inOrder.verify(instructionResponses).add(INSTRUCTION_RESPONSE);
public void testLaunchFnHarnessAndTeardownCleanly() throws Exception { Function<String, String> environmentVariableMock = mock(Function.class); PipelineOptions options = PipelineOptionsFactory.create(); when(environmentVariableMock.apply("HARNESS_ID")).thenReturn("id"); when(environmentVariableMock.apply("PIPELINE_OPTIONS")) .thenReturn(PipelineOptionsTranslation.toJson(options)); List<BeamFnApi.LogEntry> logEntries = new ArrayList<>(); List<BeamFnApi.InstructionResponse> instructionResponses = mock(List.class); BeamFnLoggingGrpc.BeamFnLoggingImplBase loggingService = new BeamFnLoggingGrpc.BeamFnLoggingImplBase() { @Override public StreamObserver<BeamFnApi.LogEntry.List> logging( StreamObserver<LogControl> responseObserver) { return TestStreams.withOnNext( (BeamFnApi.LogEntry.List entries) -> logEntries.addAll(entries.getLogEntriesList())) .withOnCompleted(responseObserver::onCompleted) .build(); } }; BeamFnControlGrpc.BeamFnControlImplBase controlService = new BeamFnControlGrpc.BeamFnControlImplBase() { @Override public StreamObserver<InstructionResponse> control( StreamObserver<InstructionRequest> responseObserver) { CountDownLatch waitForResponses = new CountDownLatch(1 /* number of responses expected */); options .as(GcsOptions.class) .getExecutorService() .submit( () -> { responseObserver.onNext(INSTRUCTION_REQUEST); Uninterruptibles.awaitUninterruptibly(waitForResponses); responseObserver.onCompleted(); }); return TestStreams.withOnNext( (InstructionResponse t) -> { instructionResponses.add(t); waitForResponses.countDown(); }) .withOnCompleted(waitForResponses::countDown) .build(); } }; Server loggingServer = ServerBuilder.forPort(0).addService(loggingService).build(); loggingServer.start(); try { Server controlServer = ServerBuilder.forPort(0).addService(controlService).build(); controlServer.start(); try { Endpoints.ApiServiceDescriptor loggingDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + loggingServer.getPort()) .build(); 
Endpoints.ApiServiceDescriptor controlDescriptor = Endpoints.ApiServiceDescriptor.newBuilder() .setUrl("localhost:" + controlServer.getPort()) .build(); when(environmentVariableMock.apply("LOGGING_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(loggingDescriptor)); when(environmentVariableMock.apply("CONTROL_API_SERVICE_DESCRIPTOR")) .thenReturn(TextFormat.printToString(controlDescriptor)); FnHarness.main(environmentVariableMock); } finally { controlServer.shutdownNow(); } } finally { loggingServer.shutdownNow(); } InOrder inOrder = inOrder(onStartupMock, beforeProcessingMock, environmentVariableMock, instructionResponses); inOrder.verify(onStartupMock).run(); inOrder.verify(environmentVariableMock, atLeastOnce()).apply(any()); inOrder.verify(beforeProcessingMock).accept(any()); inOrder.verify(instructionResponses).add(INSTRUCTION_RESPONSE); }
class FnHarnessTestInitializer extends BeamWorkerInitializer { @Override public void onStartup() { onStartupMock.run(); } @Override public void beforeProcessing(PipelineOptions options) { beforeProcessingMock.accept(options); } }
class FnHarnessTestInitializer implements JvmInitializer { @Override public void onStartup() { onStartupMock.run(); } @Override public void beforeProcessing(PipelineOptions options) { beforeProcessingMock.accept(options); } }
Should we add a UT to cover the branch of `!containsInTableContainedRule(tableName, materials)`?
private void refresh(final CreateTableStatement createTableStatement) throws SQLException { createTableStatement.setTable(new SimpleTableSegment(new TableNameSegment(1, 3, new IdentifierValue("t_order_0")))); Map<String, DataSource> dataSourceMap = mock(HashMap.class); when(materials.getDataSourceMap()).thenReturn(dataSourceMap); DataSource dataSource = mock(DataSource.class); when(dataSourceMap.get(eq("ds"))).thenReturn(dataSource); Connection connection = mock(Connection.class); when(dataSource.getConnection()).thenReturn(connection); DatabaseMetaData metaData = mock(DatabaseMetaData.class); when(connection.getMetaData()).thenReturn(metaData); ResultSet resultSet = mock(ResultSet.class); when(metaData.getTables(any(), any(), any(), any())).thenReturn(resultSet); when(resultSet.next()).thenReturn(false); ShardingSphereSchema schema = ShardingSphereSchemaBuildUtil.buildSchema(); SchemaRefresher<CreateTableStatement> schemaRefresher = new CreateTableStatementSchemaRefresher(); schemaRefresher.refresh(schema, Collections.singleton("ds"), createTableStatement, materials); assertTrue(schema.containsTable("t_order_0")); }
schemaRefresher.refresh(schema, Collections.singleton("ds"), createTableStatement, materials);
private void refresh(final CreateTableStatement createTableStatement) throws SQLException { createTableStatement.setTable(new SimpleTableSegment(new TableNameSegment(1, 3, new IdentifierValue("t_order_0")))); Map<String, DataSource> dataSourceMap = mock(HashMap.class); when(materials.getDataSourceMap()).thenReturn(dataSourceMap); DataSource dataSource = mock(DataSource.class); when(dataSourceMap.get(eq("ds"))).thenReturn(dataSource); Connection connection = mock(Connection.class); when(dataSource.getConnection()).thenReturn(connection); DatabaseMetaData metaData = mock(DatabaseMetaData.class); when(connection.getMetaData()).thenReturn(metaData); ResultSet resultSet = mock(ResultSet.class); when(metaData.getTables(any(), any(), any(), any())).thenReturn(resultSet); when(resultSet.next()).thenReturn(false); ShardingSphereSchema schema = ShardingSphereSchemaBuildUtil.buildSchema(); SchemaRefresher<CreateTableStatement> schemaRefresher = new CreateTableStatementSchemaRefresher(); schemaRefresher.refresh(schema, Collections.singleton("ds"), createTableStatement, materials); assertTrue(schema.containsTable("t_order_0")); }
class CreateTableStatementSchemaRefresherTest { private SchemaBuilderMaterials materials = mock(SchemaBuilderMaterials.class); @Test public void refreshForMySQL() throws SQLException { MySQLCreateTableStatement createTableStatement = new MySQLCreateTableStatement(); createTableStatement.setNotExisted(false); when(materials.getDatabaseType()).thenReturn(new MySQLDatabaseType()); refresh(createTableStatement); } @Test public void refreshForOracle() throws SQLException { OracleCreateTableStatement createTableStatement = new OracleCreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new OracleDatabaseType()); refresh(createTableStatement); } @Test public void refreshForPostgreSQL() throws SQLException { PostgreSQLCreateTableStatement createTableStatement = new PostgreSQLCreateTableStatement(); createTableStatement.setNotExisted(false); when(materials.getDatabaseType()).thenReturn(new PostgreSQLDatabaseType()); refresh(createTableStatement); } @Test public void refreshForSQL92() throws SQLException { SQL92CreateTableStatement createTableStatement = new SQL92CreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new SQL92DatabaseType()); refresh(createTableStatement); } @Test public void refreshForSQLServer() throws SQLException { SQLServerCreateTableStatement createTableStatement = new SQLServerCreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new SQLServerDatabaseType()); refresh(createTableStatement); } }
class CreateTableStatementSchemaRefresherTest { private SchemaBuilderMaterials materials = mock(SchemaBuilderMaterials.class); @Test public void refreshForMySQL() throws SQLException { MySQLCreateTableStatement createTableStatement = new MySQLCreateTableStatement(); createTableStatement.setNotExisted(false); when(materials.getDatabaseType()).thenReturn(new MySQLDatabaseType()); refresh(createTableStatement); } @Test public void refreshForOracle() throws SQLException { OracleCreateTableStatement createTableStatement = new OracleCreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new OracleDatabaseType()); refresh(createTableStatement); } @Test public void refreshForPostgreSQL() throws SQLException { PostgreSQLCreateTableStatement createTableStatement = new PostgreSQLCreateTableStatement(); createTableStatement.setNotExisted(false); when(materials.getDatabaseType()).thenReturn(new PostgreSQLDatabaseType()); refresh(createTableStatement); } @Test public void refreshForSQL92() throws SQLException { SQL92CreateTableStatement createTableStatement = new SQL92CreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new SQL92DatabaseType()); refresh(createTableStatement); } @Test public void refreshForSQLServer() throws SQLException { SQLServerCreateTableStatement createTableStatement = new SQLServerCreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new SQLServerDatabaseType()); refresh(createTableStatement); } @Test public void refreshWithTableRuleForMySQL() throws SQLException { MySQLCreateTableStatement createTableStatement = new MySQLCreateTableStatement(); createTableStatement.setNotExisted(false); when(materials.getDatabaseType()).thenReturn(new MySQLDatabaseType()); refreshWithTableRule(createTableStatement); } @Test public void refreshWithTableRuleForOracle() throws SQLException { OracleCreateTableStatement createTableStatement = new OracleCreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new 
OracleDatabaseType()); refreshWithTableRule(createTableStatement); } @Test public void refreshWithTableRuleForPostgreSQL() throws SQLException { PostgreSQLCreateTableStatement createTableStatement = new PostgreSQLCreateTableStatement(); createTableStatement.setNotExisted(false); when(materials.getDatabaseType()).thenReturn(new PostgreSQLDatabaseType()); refreshWithTableRule(createTableStatement); } @Test public void refreshWithTableRuleForSQL92() throws SQLException { SQL92CreateTableStatement createTableStatement = new SQL92CreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new SQL92DatabaseType()); refreshWithTableRule(createTableStatement); } @Test public void refreshWithTableRuleForSQLServer() throws SQLException { SQLServerCreateTableStatement createTableStatement = new SQLServerCreateTableStatement(); when(materials.getDatabaseType()).thenReturn(new SQLServerDatabaseType()); refreshWithTableRule(createTableStatement); } private void refreshWithTableRule(final CreateTableStatement createTableStatement) throws SQLException { createTableStatement.setTable(new SimpleTableSegment(new TableNameSegment(1, 3, new IdentifierValue("t_order_0")))); ShardingSphereRule rule = mock(TableContainedRule.class); Collection<ShardingSphereRule> rules = Arrays.asList(rule); when(materials.getRules()).thenReturn(rules); when(((TableContainedRule) rule).getTables()).thenReturn(Arrays.asList("t_order_0")); Map<String, DataSource> dataSourceMap = mock(HashMap.class); when(materials.getDataSourceMap()).thenReturn(dataSourceMap); DataSource dataSource = mock(DataSource.class); when(dataSourceMap.get(eq("ds"))).thenReturn(dataSource); Connection connection = mock(Connection.class); when(dataSource.getConnection()).thenReturn(connection); DatabaseMetaData metaData = mock(DatabaseMetaData.class); when(connection.getMetaData()).thenReturn(metaData); ResultSet resultSet = mock(ResultSet.class); when(metaData.getTables(any(), any(), any(), any())).thenReturn(resultSet); 
when(resultSet.next()).thenReturn(false); ShardingSphereSchema schema = ShardingSphereSchemaBuildUtil.buildSchema(); SchemaRefresher<CreateTableStatement> schemaRefresher = new CreateTableStatementSchemaRefresher(); schemaRefresher.refresh(schema, Collections.singleton("ds"), createTableStatement, materials); assertTrue(schema.containsTable("t_order_0")); } }
FYI, we could also do: ``` headerMapper = simpleMapper .copy() .configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true); ``` https://static.javadoc.io/com.fasterxml.jackson.core/jackson-databind/2.9.8/com/fasterxml/jackson/databind/ObjectMapper.html#copy--
public JacksonAdapter() { simpleMapper = initializeObjectMapper(new ObjectMapper()); xmlMapper = initializeObjectMapper(new XmlMapper()); xmlMapper.configure(ToXmlGenerator.Feature.WRITE_XML_DECLARATION, true); xmlMapper.setDefaultUseWrapper(false); ObjectMapper flatteningMapper = initializeObjectMapper(new ObjectMapper()) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); mapper = initializeObjectMapper(new ObjectMapper()) .registerModule(AdditionalPropertiesSerializer.getModule(flatteningMapper)) .registerModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper)) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); headerMapper = initializeHeaderMapper(new ObjectMapper()); }
headerMapper = initializeHeaderMapper(new ObjectMapper());
public JacksonAdapter() { simpleMapper = initializeObjectMapper(new ObjectMapper()); xmlMapper = initializeObjectMapper(new XmlMapper()); xmlMapper.configure(ToXmlGenerator.Feature.WRITE_XML_DECLARATION, true); xmlMapper.setDefaultUseWrapper(false); ObjectMapper flatteningMapper = initializeObjectMapper(new ObjectMapper()) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); mapper = initializeObjectMapper(new ObjectMapper()) .registerModule(AdditionalPropertiesSerializer.getModule(flatteningMapper)) .registerModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper)) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); headerMapper = simpleMapper .copy() .configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true); }
class JacksonAdapter implements SerializerAdapter { private final ClientLogger logger = new ClientLogger(JacksonAdapter.class); /** * An instance of {@link ObjectMapper} to serialize/deserialize objects. */ private final ObjectMapper mapper; /** * An instance of {@link ObjectMapper} that does not do flattening. */ private final ObjectMapper simpleMapper; private final XmlMapper xmlMapper; private final ObjectMapper headerMapper; /* * The lazily-created serializer for this ServiceClient. */ private static SerializerAdapter serializerAdapter; /* * BOM header from some response bodies. To be removed in deserialization. */ private static final String BOM = "\uFEFF"; /** * Creates a new JacksonAdapter instance with default mapper settings. */ /** * Gets a static instance of {@link ObjectMapper} that doesn't handle flattening. * * @return an instance of {@link ObjectMapper}. */ protected ObjectMapper simpleMapper() { return simpleMapper; } /** * maintain singleton instance of the default serializer adapter. 
* * @return the default serializer */ public static synchronized SerializerAdapter createDefaultSerializerAdapter() { if (serializerAdapter == null) { serializerAdapter = new JacksonAdapter(); } return serializerAdapter; } /** * @return the original serializer type */ public ObjectMapper serializer() { return mapper; } @Override public String serialize(Object object, SerializerEncoding encoding) throws IOException { if (object == null) { return null; } StringWriter writer = new StringWriter(); if (encoding == SerializerEncoding.XML) { xmlMapper.writeValue(writer, object); } else { serializer().writeValue(writer, object); } return writer.toString(); } @Override public String serializeRaw(Object object) { if (object == null) { return null; } try { return serialize(object, SerializerEncoding.JSON).replaceAll("^\"*", "").replaceAll("\"*$", ""); } catch (IOException ex) { return null; } } @Override public String serializeList(List<?> list, CollectionFormat format) { if (list == null) { return null; } List<String> serialized = new ArrayList<>(); for (Object element : list) { String raw = serializeRaw(element); serialized.add(raw != null ? 
raw : ""); } return String.join(format.getDelimiter(), serialized); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, final Type type, SerializerEncoding encoding) throws IOException { if (value == null || value.isEmpty() || value.equals(BOM)) { return null; } if (value.startsWith(BOM)) { value = value.replaceFirst(BOM, ""); } final JavaType javaType = createJavaType(type); try { if (encoding == SerializerEncoding.XML) { return (T) xmlMapper.readValue(value, javaType); } else { return (T) serializer().readValue(value, javaType); } } catch (JsonParseException jpe) { throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe)); } } @Override public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException { if (deserializedHeadersType == null) { return null; } final String headersJsonString = headerMapper.writeValueAsString(headers); T deserializedHeaders = headerMapper.readValue(headersJsonString, createJavaType(deserializedHeadersType)); final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType); final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields(); for (final Field declaredField : declaredFields) { if (declaredField.isAnnotationPresent(HeaderCollection.class)) { final Type declaredFieldType = declaredField.getGenericType(); if (TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) { final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType); if (mapTypeArguments.length == 2 && mapTypeArguments[0] == String.class && mapTypeArguments[1] == String.class) { final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class); final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT); final int headerCollectionPrefixLength = headerCollectionPrefix.length(); if (headerCollectionPrefixLength > 0) { final Map<String, String> 
headerCollection = new HashMap<>(); for (final HttpHeader header : headers) { final String headerName = header.getName(); if (headerName.toLowerCase(Locale.ROOT).startsWith(headerCollectionPrefix)) { headerCollection.put(headerName.substring(headerCollectionPrefixLength), header.getValue()); } } final boolean declaredFieldAccessibleBackup = declaredField.isAccessible(); try { if (!declaredFieldAccessibleBackup) { declaredField.setAccessible(true); } declaredField.set(deserializedHeaders, headerCollection); } catch (IllegalAccessException ignored) { } finally { if (!declaredFieldAccessibleBackup) { declaredField.setAccessible(declaredFieldAccessibleBackup); } } } } } } } return deserializedHeaders; } /** * Initializes an instance of JacksonMapperAdapter with default configurations * applied to the object mapper. * * @param mapper the object mapper to use. */ private static <T extends ObjectMapper> T initializeObjectMapper(T mapper) { mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) .configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, true) .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false) .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true) .setSerializationInclusion(JsonInclude.Include.NON_NULL) .registerModule(new JavaTimeModule()) .registerModule(ByteArraySerializer.getModule()) .registerModule(Base64UrlSerializer.getModule()) .registerModule(DateTimeSerializer.getModule()) .registerModule(DateTimeRfc1123Serializer.getModule()) .registerModule(DurationSerializer.getModule()) .registerModule(HttpHeadersSerializer.getModule()); mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() .withFieldVisibility(JsonAutoDetect.Visibility.ANY) .withSetterVisibility(JsonAutoDetect.Visibility.NONE) .withGetterVisibility(JsonAutoDetect.Visibility.NONE) 
.withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)); return mapper; } private static <T extends ObjectMapper> T initializeHeaderMapper(T mapper) { initializeObjectMapper(mapper); mapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true); return mapper; } private JavaType createJavaType(Type type) { JavaType result; if (type == null) { result = null; } else if (type instanceof JavaType) { result = (JavaType) type; } else if (type instanceof ParameterizedType) { final ParameterizedType parameterizedType = (ParameterizedType) type; final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length]; for (int i = 0; i != actualTypeArguments.length; i++) { javaTypeArguments[i] = createJavaType(actualTypeArguments[i]); } result = mapper .getTypeFactory().constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments); } else { result = mapper .getTypeFactory().constructType(type); } return result; } }
class JacksonAdapter implements SerializerAdapter { private final ClientLogger logger = new ClientLogger(JacksonAdapter.class); /** * An instance of {@link ObjectMapper} to serialize/deserialize objects. */ private final ObjectMapper mapper; /** * An instance of {@link ObjectMapper} that does not do flattening. */ private final ObjectMapper simpleMapper; private final XmlMapper xmlMapper; private final ObjectMapper headerMapper; /* * The lazily-created serializer for this ServiceClient. */ private static SerializerAdapter serializerAdapter; /* * BOM header from some response bodies. To be removed in deserialization. */ private static final String BOM = "\uFEFF"; /** * Creates a new JacksonAdapter instance with default mapper settings. */ /** * Gets a static instance of {@link ObjectMapper} that doesn't handle flattening. * * @return an instance of {@link ObjectMapper}. */ protected ObjectMapper simpleMapper() { return simpleMapper; } /** * maintain singleton instance of the default serializer adapter. 
* * @return the default serializer */ public static synchronized SerializerAdapter createDefaultSerializerAdapter() { if (serializerAdapter == null) { serializerAdapter = new JacksonAdapter(); } return serializerAdapter; } /** * @return the original serializer type */ public ObjectMapper serializer() { return mapper; } @Override public String serialize(Object object, SerializerEncoding encoding) throws IOException { if (object == null) { return null; } StringWriter writer = new StringWriter(); if (encoding == SerializerEncoding.XML) { xmlMapper.writeValue(writer, object); } else { serializer().writeValue(writer, object); } return writer.toString(); } @Override public String serializeRaw(Object object) { if (object == null) { return null; } try { return serialize(object, SerializerEncoding.JSON).replaceAll("^\"*", "").replaceAll("\"*$", ""); } catch (IOException ex) { return null; } } @Override public String serializeList(List<?> list, CollectionFormat format) { if (list == null) { return null; } List<String> serialized = new ArrayList<>(); for (Object element : list) { String raw = serializeRaw(element); serialized.add(raw != null ? 
raw : ""); } return String.join(format.getDelimiter(), serialized); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, final Type type, SerializerEncoding encoding) throws IOException { if (value == null || value.isEmpty() || value.equals(BOM)) { return null; } if (value.startsWith(BOM)) { value = value.replaceFirst(BOM, ""); } final JavaType javaType = createJavaType(type); try { if (encoding == SerializerEncoding.XML) { return (T) xmlMapper.readValue(value, javaType); } else { return (T) serializer().readValue(value, javaType); } } catch (JsonParseException jpe) { throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe)); } } @Override public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException { if (deserializedHeadersType == null) { return null; } final String headersJsonString = headerMapper.writeValueAsString(headers); T deserializedHeaders = headerMapper.readValue(headersJsonString, createJavaType(deserializedHeadersType)); final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType); final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields(); for (final Field declaredField : declaredFields) { if (declaredField.isAnnotationPresent(HeaderCollection.class)) { final Type declaredFieldType = declaredField.getGenericType(); if (TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) { final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType); if (mapTypeArguments.length == 2 && mapTypeArguments[0] == String.class && mapTypeArguments[1] == String.class) { final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class); final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT); final int headerCollectionPrefixLength = headerCollectionPrefix.length(); if (headerCollectionPrefixLength > 0) { final Map<String, String> 
headerCollection = new HashMap<>(); for (final HttpHeader header : headers) { final String headerName = header.getName(); if (headerName.toLowerCase(Locale.ROOT).startsWith(headerCollectionPrefix)) { headerCollection.put(headerName.substring(headerCollectionPrefixLength), header.getValue()); } } final boolean declaredFieldAccessibleBackup = declaredField.isAccessible(); try { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaredField.setAccessible(true); return null; }); } declaredField.set(deserializedHeaders, headerCollection); } catch (IllegalAccessException ignored) { } finally { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaredField.setAccessible(declaredFieldAccessibleBackup); return null; }); } } } } } } } return deserializedHeaders; } /** * Initializes an instance of JacksonMapperAdapter with default configurations * applied to the object mapper. * * @param mapper the object mapper to use. 
*/ private static <T extends ObjectMapper> T initializeObjectMapper(T mapper) { mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) .configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, true) .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false) .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true) .setSerializationInclusion(JsonInclude.Include.NON_NULL) .registerModule(new JavaTimeModule()) .registerModule(ByteArraySerializer.getModule()) .registerModule(Base64UrlSerializer.getModule()) .registerModule(DateTimeSerializer.getModule()) .registerModule(DateTimeRfc1123Serializer.getModule()) .registerModule(DurationSerializer.getModule()) .registerModule(HttpHeadersSerializer.getModule()); mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() .withFieldVisibility(JsonAutoDetect.Visibility.ANY) .withSetterVisibility(JsonAutoDetect.Visibility.NONE) .withGetterVisibility(JsonAutoDetect.Visibility.NONE) .withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)); return mapper; } private JavaType createJavaType(Type type) { JavaType result; if (type == null) { result = null; } else if (type instanceof JavaType) { result = (JavaType) type; } else if (type instanceof ParameterizedType) { final ParameterizedType parameterizedType = (ParameterizedType) type; final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length]; for (int i = 0; i != actualTypeArguments.length; i++) { javaTypeArguments[i] = createJavaType(actualTypeArguments[i]); } result = mapper .getTypeFactory().constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments); } else { result = mapper .getTypeFactory().constructType(type); } return result; } }
it is better to return 0 directly. Objects.hash() will return 0.
public int hashCode() { return Objects.hash(); }
return Objects.hash();
public int hashCode() { return 0; }
class DataType { /** * Convert data type in Doris catalog to data type in Nereids. * TODO: throw exception when cannot convert catalog type to Nereids type * * @param catalogType data type in Doris catalog * @return data type in Nereids */ public static DataType convertFromCatalogDataType(Type catalogType) { if (catalogType instanceof ScalarType) { ScalarType scalarType = (ScalarType) catalogType; switch (scalarType.getPrimitiveType()) { case BOOLEAN: return BooleanType.INSTANCE; case INT: return IntegerType.INSTANCE; case BIGINT: return BigIntType.INSTANCE; case DOUBLE: return DoubleType.INSTANCE; case VARCHAR: return VarcharType.createVarcharType(scalarType.getLength()); case STRING: return StringType.INSTANCE; case NULL_TYPE: return NullType.INSTANCE; default: throw new AnalysisException("Nereids do not support type: " + scalarType.getPrimitiveType()); } } else if (catalogType instanceof MapType) { throw new AnalysisException("Nereids do not support map type."); } else if (catalogType instanceof StructType) { throw new AnalysisException("Nereids do not support struct type."); } else if (catalogType instanceof ArrayType) { throw new AnalysisException("Nereids do not support array type."); } else if (catalogType instanceof MultiRowType) { throw new AnalysisException("Nereids do not support multi row type."); } else { throw new AnalysisException("Nereids do not support type: " + catalogType); } } public abstract Type toCatalogDataType(); @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } return true; } @Override }
class DataType { /** * Convert data type in Doris catalog to data type in Nereids. * TODO: throw exception when cannot convert catalog type to Nereids type * * @param catalogType data type in Doris catalog * @return data type in Nereids */ public static DataType convertFromCatalogDataType(Type catalogType) { if (catalogType instanceof ScalarType) { ScalarType scalarType = (ScalarType) catalogType; switch (scalarType.getPrimitiveType()) { case BOOLEAN: return BooleanType.INSTANCE; case INT: return IntegerType.INSTANCE; case BIGINT: return BigIntType.INSTANCE; case DOUBLE: return DoubleType.INSTANCE; case VARCHAR: return VarcharType.createVarcharType(scalarType.getLength()); case STRING: return StringType.INSTANCE; case NULL_TYPE: return NullType.INSTANCE; default: throw new AnalysisException("Nereids do not support type: " + scalarType.getPrimitiveType()); } } else if (catalogType instanceof MapType) { throw new AnalysisException("Nereids do not support map type."); } else if (catalogType instanceof StructType) { throw new AnalysisException("Nereids do not support struct type."); } else if (catalogType instanceof ArrayType) { throw new AnalysisException("Nereids do not support array type."); } else if (catalogType instanceof MultiRowType) { throw new AnalysisException("Nereids do not support multi row type."); } else { throw new AnalysisException("Nereids do not support type: " + catalogType); } } public abstract Type toCatalogDataType(); @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } return true; } @Override }